In [4]:
!pip install wget
!pip install plotly==4.5.0
!pip install folium
Collecting wget
  Downloading https://files.pythonhosted.org/packages/47/6a/62e288da7bcda82b935ff0c6cfe542970f04e29c756b0e147251b2fb251f/wget-3.2.zip
Building wheels for collected packages: wget
  Running setup.py bdist_wheel for wget ... done
  Stored in directory: /root/.cache/pip/wheels/40/15/30/7d8f7cea2902b4db79e3fea550d7d7b85ecb27ef992b618f3f
Successfully built wget
Installing collected packages: wget
Successfully installed wget-3.2
Collecting plotly==4.5.0
  Downloading https://files.pythonhosted.org/packages/06/e1/88762ade699460dc3229c890f9845d16484a40955a590b65052f0958613c/plotly-4.5.0-py2.py3-none-any.whl (7.1MB)
    100% |████████████████████████████████| 7.1MB 4.7MB/s eta 0:00:01    89% |████████████████████████████▋   | 6.3MB 35.9MB/s eta 0:00:01
Collecting retrying>=1.3.3 (from plotly==4.5.0)
  Downloading https://files.pythonhosted.org/packages/44/ef/beae4b4ef80902f22e3af073397f079c96969c69b2c7d52a57ea9ae61c9d/retrying-1.3.3.tar.gz
Requirement already satisfied: six in /opt/conda/lib/python3.6/site-packages (from plotly==4.5.0) (1.11.0)
Building wheels for collected packages: retrying
  Running setup.py bdist_wheel for retrying ... done
  Stored in directory: /root/.cache/pip/wheels/d7/a9/33/acc7b709e2a35caa7d4cae442f6fe6fbf2c43f80823d46460c
Successfully built retrying
Installing collected packages: retrying, plotly
  Found existing installation: plotly 2.0.15
    Uninstalling plotly-2.0.15:
      Successfully uninstalled plotly-2.0.15
Successfully installed plotly-4.5.0 retrying-1.3.3
Collecting folium
  Downloading https://files.pythonhosted.org/packages/fd/a0/ccb3094026649cda4acd55bf2c3822bb8c277eb11446d13d384e5be35257/folium-0.10.1-py2.py3-none-any.whl (91kB)
    100% |████████████████████████████████| 92kB 3.3MB/s ta 0:00:011
Requirement already satisfied: jinja2>=2.9 in /opt/conda/lib/python3.6/site-packages (from folium) (2.10)
Requirement already satisfied: numpy in /opt/conda/lib/python3.6/site-packages (from folium) (1.12.1)
Collecting branca>=0.3.0 (from folium)
  Downloading https://files.pythonhosted.org/packages/81/6d/31c83485189a2521a75b4130f1fee5364f772a0375f81afff619004e5237/branca-0.4.0-py3-none-any.whl
Requirement already satisfied: requests in /opt/conda/lib/python3.6/site-packages (from folium) (2.18.4)
Requirement already satisfied: MarkupSafe>=0.23 in /opt/conda/lib/python3.6/site-packages (from jinja2>=2.9->folium) (1.0)
Requirement already satisfied: six in /opt/conda/lib/python3.6/site-packages (from branca>=0.3.0->folium) (1.11.0)
Requirement already satisfied: chardet<3.1.0,>=3.0.2 in /opt/conda/lib/python3.6/site-packages (from requests->folium) (3.0.4)
Requirement already satisfied: idna<2.7,>=2.5 in /opt/conda/lib/python3.6/site-packages (from requests->folium) (2.6)
Requirement already satisfied: urllib3<1.23,>=1.21.1 in /opt/conda/lib/python3.6/site-packages (from requests->folium) (1.22)
Requirement already satisfied: certifi>=2017.4.17 in /opt/conda/lib/python3.6/site-packages (from requests->folium) (2019.11.28)
Installing collected packages: branca, folium
Successfully installed branca-0.4.0 folium-0.10.1
In [5]:
import pandas as pd
import seaborn as sns
import matplotlib.pyplot as plt
import wget
import datetime
from datetime import date,timedelta
import glob
import numpy as np
import os
import plotly.express as px
import folium
from folium import plugins
from folium.plugins import HeatMap
from folium.plugins import HeatMapWithTime

from sklearn.preprocessing import  MinMaxScaler
from sklearn.linear_model import LinearRegression, BayesianRidge
from sklearn.model_selection import RandomizedSearchCV, train_test_split
from sklearn.preprocessing import PolynomialFeatures
from sklearn.tree import DecisionTreeRegressor
from sklearn.model_selection import GridSearchCV
from sklearn.svm import SVR
from sklearn.metrics import mean_squared_error, mean_absolute_error

from keras.layers import Input, Dense, Activation, LeakyReLU
from keras import models
from keras.optimizers import Adam
from keras.callbacks import ModelCheckpoint
Using TensorFlow backend.
In [4]:
###Remove old data files and scrap latest files from web

! rm datasets/*.csv

urls = [
        'https://raw.githubusercontent.com/CSSEGISandData/COVID-19/master/csse_covid_19_data/csse_covid_19_time_series/time_series_covid19_confirmed_US.csv',
        'https://raw.githubusercontent.com/CSSEGISandData/COVID-19/master/csse_covid_19_data/csse_covid_19_time_series/time_series_covid19_deaths_US.csv',
        'https://raw.githubusercontent.com/CSSEGISandData/COVID-19/web-data/data/cases_country.csv',
        'https://raw.githubusercontent.com/CSSEGISandData/COVID-19/web-data/data/cases_time.csv'
       ]

for url in urls:
    filename = wget.download(url,out='datasets/')
In [6]:
### Read in datasets

conf_usa_df = pd.read_csv('datasets/time_series_covid19_confirmed_US.csv')
deaths_usa_df = pd.read_csv('datasets/time_series_covid19_deaths_US.csv')
cases_country_df = pd.read_csv('datasets/cases_country.csv')
cases_time_df = pd.read_csv('datasets/cases_time.csv')
In [7]:
conf_usa_df.head()
Out[7]:
UID iso2 iso3 code3 FIPS Admin2 Province_State Country_Region Lat Long_ ... 4/1/20 4/2/20 4/3/20 4/4/20 4/5/20 4/6/20 4/7/20 4/8/20 4/9/20 4/10/20
0 16 AS ASM 16 60.0 NaN American Samoa US -14.2710 -170.1320 ... 0 0 0 0 0 0 0 0 0 0
1 316 GU GUM 316 66.0 NaN Guam US 13.4443 144.7937 ... 77 82 84 93 112 113 121 121 128 130
2 580 MP MNP 580 69.0 NaN Northern Mariana Islands US 15.0979 145.6739 ... 6 6 6 6 6 6 6 6 6 6
3 630 PR PRI 630 72.0 NaN Puerto Rico US 18.2208 -66.5901 ... 286 316 316 452 475 513 573 620 683 725
4 850 VI VIR 850 78.0 NaN Virgin Islands US 18.3358 -64.8963 ... 30 30 37 40 42 43 43 45 45 50

5 rows × 91 columns

In [8]:
deaths_usa_df.head()
Out[8]:
UID iso2 iso3 code3 FIPS Admin2 Province_State Country_Region Lat Long_ ... 4/1/20 4/2/20 4/3/20 4/4/20 4/5/20 4/6/20 4/7/20 4/8/20 4/9/20 4/10/20
0 16 AS ASM 16 60.0 NaN American Samoa US -14.2710 -170.1320 ... 0 0 0 0 0 0 0 0 0 0
1 316 GU GUM 316 66.0 NaN Guam US 13.4443 144.7937 ... 3 3 4 4 4 4 4 4 4 4
2 580 MP MNP 580 69.0 NaN Northern Mariana Islands US 15.0979 145.6739 ... 1 1 1 1 1 1 1 1 1 1
3 630 PR PRI 630 72.0 NaN Puerto Rico US 18.2208 -66.5901 ... 11 12 15 18 20 21 23 24 33 39
4 850 VI VIR 850 78.0 NaN Virgin Islands US 18.3358 -64.8963 ... 0 0 0 0 1 1 1 1 1 1

5 rows × 92 columns

In [9]:
cases_country_df.head()
Out[9]:
Country_Region Last_Update Lat Long_ Confirmed Deaths Recovered Active
0 Australia 2020-04-11 19:18:03 -25.0000 133.0000 6303 57 1806 4440
1 Austria 2020-04-11 19:10:21 47.5162 14.5501 13799 337 6604 6858
2 Canada 2020-04-11 19:17:46 60.0010 -95.0010 23197 649 6548 0
3 China 2020-04-11 12:53:33 30.5928 114.3055 83014 3343 77877 1794
4 Denmark 2020-04-11 19:10:21 56.0000 10.0000 6191 260 2111 3820
In [10]:
cases_time_df.head()
Out[10]:
Country_Region Last_Update Confirmed Deaths Recovered Active Delta_Confirmed Delta_Recovered
0 Afghanistan 1/22/20 0 0 NaN NaN 0 NaN
1 Afghanistan 1/23/20 0 0 NaN NaN 0 NaN
2 Afghanistan 1/24/20 0 0 NaN NaN 0 NaN
3 Afghanistan 1/25/20 0 0 NaN NaN 0 NaN
4 Afghanistan 1/26/20 0 0 NaN NaN 0 NaN
In [12]:
###Melt df using dates in 2 dataframes

conf_usa_df_long = conf_usa_df.melt(id_vars=['UID', 'iso2', 'iso3', 'code3', 'FIPS', 'Admin2', 'Province_State', 'Country_Region', 'Lat', 'Long_', 'Combined_Key'], 
                            value_vars=conf_usa_df.columns[11:], var_name='Date', value_name='Confirmed')

deaths_usa_df_long = deaths_usa_df.melt(id_vars=['UID', 'iso2', 'iso3', 'code3', 'FIPS', 'Admin2', 'Province_State', 'Country_Region', 'Lat', 'Long_', 'Combined_Key'], 
                            value_vars=deaths_usa_df.columns[12:], var_name='Date', value_name='Deaths')

###Merge both data frames

full_table_usa = pd.concat([conf_usa_df_long, deaths_usa_df_long['Deaths']], axis=1, sort=False)
full_table_usa.head()
Out[12]:
UID iso2 iso3 code3 FIPS Admin2 Province_State Country_Region Lat Long_ Combined_Key Date Confirmed Deaths
0 16 AS ASM 16 60.0 NaN American Samoa US -14.2710 -170.1320 American Samoa, US 1/22/20 0 0
1 316 GU GUM 316 66.0 NaN Guam US 13.4443 144.7937 Guam, US 1/22/20 0 0
2 580 MP MNP 580 69.0 NaN Northern Mariana Islands US 15.0979 145.6739 Northern Mariana Islands, US 1/22/20 0 0
3 630 PR PRI 630 72.0 NaN Puerto Rico US 18.2208 -66.5901 Puerto Rico, US 1/22/20 0 0
4 850 VI VIR 850 78.0 NaN Virgin Islands US 18.3358 -64.8963 Virgin Islands, US 1/22/20 0 0
In [13]:
def drop_columns(df):
    '''
    This function takes data frame as input, checks the columns with more than 90% missing data, 
    drops them and returns the resulting data frame.
    '''
    pct_null = df.isnull().sum() / df.shape[0]
    missing_data = pct_null[pct_null > 0.90].index
    df.drop(missing_data, axis=1,inplace=True)
    
    return df
In [14]:
def rename_columns(df):
    '''
    This function take data frame as inputs, renames columns within data frame and returns resulting dataframe
    '''
    
    rename_column_dict = {
                            'Country_Region': 'Country',
                            'Last_Update': 'Date',
                            'Lat':'Latitude',
                            'Long_':'Longitude'
                        }

    df = df.rename(columns={k: v for k, v in rename_column_dict.items() if k in df.columns}, inplace=True)
    
    return df
In [15]:
def fix_country_names(df):
    '''
    This function takes data frame as input, fixes country names within Country column of data frame 
    and returns data frame with fixex country names.
    '''
    df.loc[df['Country'] == 'US', 'Country'] = 'USA'
    df.loc[df['Country'] == 'Korea, South', 'Country'] = 'South Korea'
    df.loc[df['Country'] == 'Taiwan*', 'Country'] = 'Taiwan'
    df.loc[df['Country'] == 'Congo (Kinshasa)', 'Country'] = 'Democratic Republic of the Congo'
    df.loc[df['Country'] == "Cote d'Ivoire", 'Country'] = "Côte d'Ivoire"
    df.loc[df['Country'] == 'Reunion', 'Country'] = 'Réunion'
    df.loc[df['Country'] == 'Congo (Brazzaville)', 'Country'] = 'Republic of the Congo'
    df.loc[df['Country'] == 'Bahamas, The', 'Country'] = 'Bahamas'
    df.loc[df['Country'] == 'Gambia, The', 'Country'] = 'Gambia'
    
    return df
In [16]:
def convert_date(df):
    '''
    This function takes data frame as input and converts the Date column in dataframe to datetime format
    so that relevant tranformations can be applied later on and returns resulting data frame
    '''
    df['Date'] = df['Date'].apply(pd.to_datetime)
    
    return df
In [17]:
###Apply functions to cases_time_df dataframe

drop_columns(cases_time_df)
rename_columns(cases_time_df)
fix_country_names(cases_time_df)
convert_date(cases_time_df)
cases_time_df.head()
Out[17]:
Country Date Confirmed Deaths Delta_Confirmed
0 Afghanistan 2020-01-22 0 0 0
1 Afghanistan 2020-01-23 0 0 0
2 Afghanistan 2020-01-24 0 0 0
3 Afghanistan 2020-01-25 0 0 0
4 Afghanistan 2020-01-26 0 0 0
In [18]:
###Apply functions to  cases_country_df dataframe

drop_columns(cases_country_df)
rename_columns(cases_country_df)
fix_country_names(cases_country_df)
convert_date(cases_country_df)
cases_country_df.head()
Out[18]:
Country Date Latitude Longitude Confirmed Deaths Recovered Active
0 Australia 2020-04-11 19:18:03 -25.0000 133.0000 6303 57 1806 4440
1 Austria 2020-04-11 19:10:21 47.5162 14.5501 13799 337 6604 6858
2 Canada 2020-04-11 19:17:46 60.0010 -95.0010 23197 649 6548 0
3 China 2020-04-11 12:53:33 30.5928 114.3055 83014 3343 77877 1794
4 Denmark 2020-04-11 19:10:21 56.0000 10.0000 6191 260 2111 3820
In [19]:
x = cases_country_df.sort_values(by='Confirmed',ascending=False)['Country'].values[:10]
y = cases_country_df.sort_values(by='Confirmed',ascending=False)['Confirmed'].values[:10]
plt.figure(figsize=(15,5))
plt.tick_params(size=10,labelsize = 13)
#plt.xlabel("Confirmed Cases",fontsize=15)
plt.title("Top 10 Countries (Confirmed Cases)",fontsize=20)
sns.barplot(x,y)
plt.savefig('plots/top_10_countries_confirmed_cases.png')
In [20]:
x = cases_country_df.sort_values(by='Deaths',ascending=False)['Country'].values[:10]
y = cases_country_df.sort_values(by='Deaths',ascending=False)['Deaths'].values[:10]
plt.figure(figsize=(15,5))
plt.tick_params(size=10,labelsize = 13)
plt.xlabel("Deaths",fontsize=15)
plt.title("Top 10 Countries (Deaths)",fontsize=20)
sns.barplot(x,y)
plt.savefig('plots/top_10_countries_deaths.png')
In [21]:
###Plotly express choropleth plot showing deaths across the globe

fig_gbl_deaths = px.choropleth(cases_country_df, 
                    locations='Country', 
                    color=np.log10(cases_country_df['Deaths']+1),
                    hover_name='Country',
                    hover_data=['Deaths'],
                    locationmode="country names"
                   )
                        
fig_gbl_deaths.update_geos(fitbounds="locations", visible=True)
fig_gbl_deaths.update_layout(title_text="Heat Map for Deaths Worldwide")
fig_gbl_deaths.update_coloraxes(colorbar_title="Deaths",colorscale="Oranges")
fig_gbl_deaths.show()
In [22]:
###Plot showing top 10  countires based on confirmed cases 

#Build a top 10 country list
country_list = cases_country_df.sort_values(by='Confirmed',ascending=False)['Country'].values[:10].tolist()

#Define plot
plt.figure(figsize=(20,9))
plt.title('Confirmed Coronavirus Cases over time', size=30)
plt.xlabel('No. of Days starting 01/22', size=20)
plt.ylabel('No. of Cases', size=20)
plt.grid(True,which='minor')

#Loop through each country and plot
for i,cntry in enumerate(country_list):
    xi = [i for i in range(len((cases_time_df.loc[cases_time_df['Country']==str(cntry), ['Date']])))]
    yi = cases_time_df.loc[cases_time_df['Country']==str(cntry), ['Confirmed']]   
    plt.plot(xi, yi, '-o', label=cntry, linewidth =3, markevery=[-1])
    plt.xticks(np.arange(min(xi), max(xi)+1, 7))
    plt.legend(loc = "upper left",prop={'size': 15})
    plt.grid(True)
    plt.grid(color='b', linestyle='--', linewidth=0.1)
    #plt.text(xi[-1],yi.loc[yi.index[-1]][0],yi.loc[yi.index[-1]][0],rotation=45)
plt.savefig('plots/top_10_countries_confirmed_cases_over_time.png')
In [25]:
###Apply functions to full_table_usa dataframe

drop_columns(full_table_usa)
rename_columns(full_table_usa)
fix_country_names(full_table_usa)
convert_date(full_table_usa)
full_table_usa.head()
Out[25]:
UID iso2 iso3 code3 FIPS Admin2 Province_State Country Latitude Longitude Combined_Key Date Confirmed Deaths
0 16 AS ASM 16 60.0 NaN American Samoa USA -14.2710 -170.1320 American Samoa, US 2020-01-22 0 0
1 316 GU GUM 316 66.0 NaN Guam USA 13.4443 144.7937 Guam, US 2020-01-22 0 0
2 580 MP MNP 580 69.0 NaN Northern Mariana Islands USA 15.0979 145.6739 Northern Mariana Islands, US 2020-01-22 0 0
3 630 PR PRI 630 72.0 NaN Puerto Rico USA 18.2208 -66.5901 Puerto Rico, US 2020-01-22 0 0
4 850 VI VIR 850 78.0 NaN Virgin Islands USA 18.3358 -64.8963 Virgin Islands, US 2020-01-22 0 0
In [26]:
full_table_usa.drop(['UID', 'iso2', 'iso3', 'code3', 'FIPS', 'Admin2','Country','Combined_Key'],axis=1,inplace=True)
In [27]:
full_table_usa.head()
Out[27]:
Province_State Latitude Longitude Date Confirmed Deaths
0 American Samoa -14.2710 -170.1320 2020-01-22 0 0
1 Guam 13.4443 144.7937 2020-01-22 0 0
2 Northern Mariana Islands 15.0979 145.6739 2020-01-22 0 0
3 Puerto Rico 18.2208 -66.5901 2020-01-22 0 0
4 Virgin Islands 18.3358 -64.8963 2020-01-22 0 0
In [28]:
def generateBaseMap(default_location=[39.8097, -98.5556], default_zoom_start=4):
    '''
    This function returns a base map for USA with zooming capability
    '''
    base_map = folium.Map(location=default_location,control_scale=True, zoom_start=default_zoom_start)
    return base_map

base_map=generateBaseMap()
In [30]:
###Plot heatmap using longitude and latitude on basemap

HeatMap(data=full_table_usa[['Latitude', 'Longitude']].groupby(['Latitude', 'Longitude']).sum().reset_index().values.tolist(),radius=8).add_to(base_map)
base_map
Out[30]:
In [31]:
#Derive list of lists for logitudes and latitude with respect to dates
df_case_lat_long_list = []
for i in full_table_usa.Date.sort_values().unique():
    df_case_lat_long_list.append(full_table_usa.loc[(full_table_usa.Date == i) & (full_table_usa.Confirmed > 0), ['Latitude', 'Longitude']].groupby(['Latitude', 'Longitude']).sum().reset_index().values.tolist())
In [32]:
#Plot heatmap using longitude and latitude on basemap with respect to Date
base_map = generateBaseMap(default_zoom_start=4)
HeatMapWithTime(df_case_lat_long_list, radius=5, gradient={0.2: 'blue', 0.4: 'lime', 0.6: 'orange', 1: 'red'}, min_opacity=0.5, max_opacity=0.8, use_local_extrema=True).add_to(base_map)
base_map
Out[32]:
In [37]:
#full_table_usa[(full_table_usa['Date']=='2020-03-01') & (full_table_usa['Confirmed']>0)].groupby('Date')['Confirmed'].sum().tolist()[0]
In [38]:
#full_table_usa[(full_table_usa['Date']=='2020-03-01') & (full_table_usa['Confirmed']>0)]
In [33]:
###Define features and labels for supervised learning algorithms

df_new = full_table_usa.groupby('Date')['Confirmed'].sum().reset_index()
X = np.arange(1,len(df_new.index)+1)
y = np.array(df_new['Confirmed'].values)
In [34]:
###Split data in training and testing sets

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42, shuffle=True) 
In [35]:
###parms for future predictions

days_to_predit = 10
pred_days = np.arange(1,len(X)+ days_to_predit+1)
In [36]:
###Reshape data before feeding them to regressor algorithms

X_train = X_train.reshape(-1,1)
y_train = y_train.reshape(-1,1)
X_test = X_test.reshape(-1,1)
pred_days = pred_days.reshape(-1,1)
In [37]:
###Hyper parameter tuning for DecisionTreeRegressor

#model1 = DecisionTreeRegressor(max_depth=1,random_state=0)
#parameters = {'criterion':['mse','friedman_mse','mae'],'splitter':['best','random'],'max_depth':[1,2,3,4,5,10,50],'min_samples_split':[2,3,4,5,10,50],'min_samples_leaf':[1,2,3,4,5,10,50]}
#grid_obj = GridSearchCV(model1, parameters)
#grid_fit = grid_obj.fit(X_train, y_train)
#best_clf = grid_fit.best_estimator_
#best_clf
In [38]:
###Instantiate and  fit model

model1 = DecisionTreeRegressor(criterion='mse', max_depth=10, max_features=None,
           max_leaf_nodes=None, min_impurity_decrease=0.0,
           min_impurity_split=None, min_samples_leaf=1,
           min_samples_split=5, min_weight_fraction_leaf=0.0,
           presort=False, random_state=0, splitter='random')
model1.fit(X_train, y_train)
Out[38]:
DecisionTreeRegressor(criterion='mse', max_depth=10, max_features=None,
           max_leaf_nodes=None, min_impurity_decrease=0.0,
           min_impurity_split=None, min_samples_leaf=1,
           min_samples_split=5, min_weight_fraction_leaf=0.0,
           presort=False, random_state=0, splitter='random')
In [39]:
###Check how our model is doing against testing data

model_test_pred = model1.predict(X_test)
plt.figure(figsize=(10,5))
plt.plot(y_test)
plt.plot(model_test_pred)
plt.legend(['Test Data', 'DecisionTreeRegressor Predictions'],loc = "upper left")
print('Mean absolute error:', mean_absolute_error(model_test_pred, y_test))
#print('Mean squared error:',mean_squared_error(model_test_pred, y_test))
plt.savefig('plots/decisiontreeregresor_test_data.png')
Mean absolute error: 9865.684375
In [40]:
###Instantiate, fit model

model2 = PolynomialFeatures(degree=5)
poly_X_train = model2.fit_transform(X_train)
poly_X_test = model2.fit_transform(X_test)
poly_pred_days = model2.fit_transform(pred_days)
In [41]:
###Check how our model is doing against testing data

linear_model = LinearRegression(normalize=True, fit_intercept=False)
linear_model.fit(poly_X_train, y_train)
test_linear_pred = linear_model.predict(poly_X_test)
print('Mean absolute error:', mean_absolute_error(test_linear_pred, y_test))
#print('Mean squared error:',mean_squared_error(test_linear_pred, y_test))
plt.figure(figsize=(10,5))
plt.plot(y_test)
plt.plot(test_linear_pred)
plt.legend(['Test Data', 'Polynomial Regression Predictions'],loc = "upper left" )
plt.savefig('plots/ploynomialregresor_test_data.png')
Mean absolute error: 4566.06489907
In [42]:
###Hyper parameter tuning for SVR

# use this to find the optimal parameters for SVR
#c = [0.01, 0.1, 1]
#gamma = [0.01, 0.1, 1]
#epsilon = [0.01, 0.1, 1]
#shrinking = [True, False]
#degree = [3, 4, 5, 6, 7]

#svm_grid = {'C': c, 'gamma' : gamma, 'epsilon': epsilon, 'shrinking' : shrinking, 'degree': degree}

#svm = SVR(kernel='poly')
#svm_search = RandomizedSearchCV(svm, svm_grid, scoring='neg_mean_squared_error', cv=3, return_train_score=True, n_jobs=-1, n_iter=30, verbose=1)
#svm_search.fit(X_train, y_train)
#svm_search.best_params_
In [43]:
###Instantiate, fit model

model3 = SVR(shrinking=True, kernel='poly',gamma=0.01, epsilon=1,degree=7, C=0.1)
model3.fit(X_train, y_train)
/opt/conda/lib/python3.6/site-packages/sklearn/utils/validation.py:578: DataConversionWarning:

A column-vector y was passed when a 1d array was expected. Please change the shape of y to (n_samples, ), for example using ravel().

Out[43]:
SVR(C=0.1, cache_size=200, coef0=0.0, degree=7, epsilon=1, gamma=0.01,
  kernel='poly', max_iter=-1, shrinking=True, tol=0.001, verbose=False)
In [44]:
###Check how our model is doing against testing data

model_test_pred = model3.predict(X_test)
plt.figure(figsize=(10,5))
plt.plot(y_test)
plt.plot(model_test_pred)
plt.legend(['Test Data', 'SVR Predictions'],loc = "upper left")
print('Mean absolute error:', mean_absolute_error(model_test_pred, y_test))
#print('Mean squared error:',mean_squared_error(model_test_pred, y_test))
plt.savefig('plots/supportvectorregresor_test_data.png')
Mean absolute error: 6227.41140143
In [46]:
###Get future dates to make predictions

pred_dates=[]
for num_day in pred_days:
    pred_dates.append(full_table_usa.Date.min()+ datetime.timedelta(days=int(num_day)))
In [47]:
###Plot DecisionTreeRegressor predictions against actual data 

model1_pred = model1.predict(pred_days)
plt.figure(figsize=(15, 5))
plt.plot(full_table_usa.Date.sort_values().unique(), y)
plt.plot(pred_dates, model1_pred, linestyle='dashed', color='purple')
plt.title('Coronavirus Cases over time in USA- Actual vs Predicted', size=20)
plt.xlabel('Days Since 1/22/2020', size=20)
plt.ylabel('# of Cases', size=30)
plt.legend(['Actual data', 'DecisionTreeRegressor predicted data'], prop={'size': 20})
plt.xticks(size=10)
plt.yticks(size=10)
plt.show()
plt.savefig('plots/decisiontreereg_pred.png')
<matplotlib.figure.Figure at 0x7facdfb14668>
In [48]:
###Plot Polynomial Regressor predictions against actual data

model2_pred = linear_model.predict(poly_pred_days)
plt.figure(figsize=(15, 5))
plt.plot(full_table_usa.Date.sort_values().unique(), y)
plt.plot(pred_dates, model2_pred, linestyle='dashed', color='purple')
plt.title('Coronavirus Cases over time in USA- Actual vs Predicted', size=20)
plt.xlabel('Days Since 1/22/2020', size=20)
plt.ylabel('# of Cases', size=30)
plt.legend(['Actual data', 'Polynomial predicted data'], prop={'size': 20})
plt.xticks(size=10)
plt.yticks(size=10)
plt.show()
plt.savefig('plots/ploynomialregresor_predictions.png')
<matplotlib.figure.Figure at 0x7face853df98>
In [54]:
###Plot SVR predictions against actual data

model3_pred = model3.predict(pred_days)
plt.figure(figsize=(15, 5))
plt.plot(full_table_usa.Date.sort_values().unique(), y)
plt.plot(pred_dates, model3_pred, linestyle='dashed', color='purple')
plt.title('Coronavirus Cases over time in USA- Actual vs Predicted', size=20)
plt.xlabel('Days Since 1/22/2020', size=20)
plt.ylabel('# of Cases', size=30)
plt.legend(['Actual data', 'SVR predicted data'], prop={'size': 20})
plt.xticks(size=10)
plt.yticks(size=10)
plt.show()
plt.savefig('plots/supportvectorregresor_predictions.png')
<matplotlib.figure.Figure at 0x7f952a5f14e0>
In [49]:
full_table_usa.head()
Out[49]:
Province_State Latitude Longitude Date Confirmed Deaths
0 American Samoa -14.2710 -170.1320 2020-01-22 0 0
1 Guam 13.4443 144.7937 2020-01-22 0 0
2 Northern Mariana Islands 15.0979 145.6739 2020-01-22 0 0
3 Puerto Rico 18.2208 -66.5901 2020-01-22 0 0
4 Virgin Islands 18.3358 -64.8963 2020-01-22 0 0
In [50]:
###Prepare features and lables for Deep learning model

data_y = np.array(full_table_usa.groupby('Date')['Confirmed'].sum()).astype('float32')
data_x = np.arange(1,len(data_y)+1).astype('float32')

data_y_deaths = np.array(full_table_usa.groupby('Date')['Deaths'].sum()).astype('float32')
In [85]:
###Define deep learning model using keras

#def build_model():
#    '''
#    This function  builds a model
#    '''
#    Input_dim = Input(shape=(1,))
#    Dense_l1 = Dense(75,name="Dense_l1")(Input_dim)
#    LRelu_l1 = LeakyReLU(name = "LRelu_l1")(Dense_l1)
#    Dense_l2 = Dense(150,name = "Dense_l2")(LRelu_l1)
#    LRelu_l2 = LeakyReLU(name = "LRelu_l2")(Dense_l2)
#    Dense_l3 = Dense(75,name = "Dense_l3")(LRelu_l2)
#    LRelu_l3 = LeakyReLU(name = "LRelu_l3")(Dense_l3)
#    Dense_l4 = Dense(1,name="Dense_l4")(LRelu_l3)
#    LRelu_l4 = LeakyReLU(name = "Output")(Dense_l4)
#    model = models.Model(inputs=Input_dim, outputs=LRelu_l4)
#    model.compile(optimizer=Adam(lr=0.001),loss='mean_absolute_error',metrics=['acc'])
#    return model
#model = build_model()
#model_death_pred = build_model()
In [86]:
###Save a model when loss is reduced from previous epoch

#filepath="models/weights-improvement-{epoch:02d}-{val_acc:.2f}.hdf5"
#checkpoint = ModelCheckpoint(filepath, monitor='loss', verbose=1, save_best_only=True, mode='min')
#callbacks_list = [checkpoint]

#filepath2="models2/weights-improvement-{epoch:02d}-{val_acc:.2f}.hdf5"
#checkpoint2 = ModelCheckpoint(filepath2, monitor='loss', verbose=1, save_best_only=True, mode='min')
#callbacks_list2 = [checkpoint2]
In [82]:
###Remove old models 

#! rm models/*.hdf5
#! rm models2/*.hdf5
In [348]:
###Fit the model. While doing a fit show model most of the data so that it can train well. We have too less data for deep learning

#model.fit(data_x,data_y,validation_split=0.001, epochs=1000,batch_size=1,shuffle=False, callbacks=callbacks_list,verbose=0)
Epoch 00001: loss improved from inf to 51843.89594, saving model to models/weights-improvement-01-0.00.hdf5
Epoch 00002: loss did not improve
Epoch 00003: loss did not improve
Epoch 00004: loss did not improve
Epoch 00005: loss did not improve
Epoch 00006: loss did not improve
Epoch 00007: loss did not improve
Epoch 00008: loss did not improve
Epoch 00009: loss did not improve
Epoch 00010: loss did not improve
Epoch 00011: loss did not improve
Epoch 00012: loss did not improve
Epoch 00013: loss did not improve
Epoch 00014: loss did not improve
Epoch 00015: loss did not improve
Epoch 00016: loss did not improve
Epoch 00017: loss did not improve
Epoch 00018: loss did not improve
Epoch 00019: loss did not improve
Epoch 00020: loss did not improve
Epoch 00021: loss did not improve
Epoch 00022: loss did not improve
Epoch 00023: loss did not improve
Epoch 00024: loss did not improve
Epoch 00025: loss improved from 51843.89594 to 51731.74598, saving model to models/weights-improvement-25-0.00.hdf5
Epoch 00026: loss did not improve
Epoch 00027: loss did not improve
Epoch 00028: loss did not improve
Epoch 00029: loss did not improve
Epoch 00030: loss did not improve
Epoch 00031: loss did not improve
Epoch 00032: loss did not improve
Epoch 00033: loss did not improve
Epoch 00034: loss did not improve
Epoch 00035: loss did not improve
Epoch 00036: loss did not improve
Epoch 00037: loss improved from 51731.74598 to 51717.68438, saving model to models/weights-improvement-37-0.00.hdf5
Epoch 00038: loss improved from 51717.68438 to 51692.64858, saving model to models/weights-improvement-38-0.00.hdf5
Epoch 00039: loss improved from 51692.64858 to 51675.19138, saving model to models/weights-improvement-39-0.00.hdf5
Epoch 00040: loss improved from 51675.19138 to 51659.19213, saving model to models/weights-improvement-40-0.00.hdf5
Epoch 00041: loss improved from 51659.19213 to 51636.88538, saving model to models/weights-improvement-41-0.00.hdf5
Epoch 00042: loss improved from 51636.88538 to 51619.65228, saving model to models/weights-improvement-42-0.00.hdf5
Epoch 00043: loss did not improve
Epoch 00044: loss did not improve
Epoch 00045: loss improved from 51619.65228 to 51611.88193, saving model to models/weights-improvement-45-0.00.hdf5
Epoch 00046: loss improved from 51611.88193 to 51569.10539, saving model to models/weights-improvement-46-0.00.hdf5
Epoch 00047: loss improved from 51569.10539 to 51544.40654, saving model to models/weights-improvement-47-0.00.hdf5
Epoch 00048: loss improved from 51544.40654 to 51499.07772, saving model to models/weights-improvement-48-0.00.hdf5
Epoch 00049: loss improved from 51499.07772 to 51482.27905, saving model to models/weights-improvement-49-0.00.hdf5
Epoch 00050: loss improved from 51482.27905 to 51438.39687, saving model to models/weights-improvement-50-0.00.hdf5
Epoch 00051: loss improved from 51438.39687 to 51413.63439, saving model to models/weights-improvement-51-0.00.hdf5
Epoch 00052: loss improved from 51413.63439 to 51368.28757, saving model to models/weights-improvement-52-0.00.hdf5
Epoch 00053: loss improved from 51368.28757 to 51335.36332, saving model to models/weights-improvement-53-0.00.hdf5
Epoch 00054: loss improved from 51335.36332 to 51296.31686, saving model to models/weights-improvement-54-0.00.hdf5
Epoch 00055: loss improved from 51296.31686 to 51233.36704, saving model to models/weights-improvement-55-0.00.hdf5
Epoch 00056: loss improved from 51233.36704 to 51205.16172, saving model to models/weights-improvement-56-0.00.hdf5
Epoch 00057: loss did not improve
Epoch 00058: loss did not improve
Epoch 00059: loss improved from 51205.16172 to 51171.15500, saving model to models/weights-improvement-59-0.00.hdf5
Epoch 00060: loss improved from 51171.15500 to 51122.75661, saving model to models/weights-improvement-60-0.00.hdf5
Epoch 00061: loss improved from 51122.75661 to 51044.55342, saving model to models/weights-improvement-61-0.00.hdf5
Epoch 00062: loss improved from 51044.55342 to 50981.76795, saving model to models/weights-improvement-62-0.00.hdf5
Epoch 00063: loss improved from 50981.76795 to 50929.99937, saving model to models/weights-improvement-63-0.00.hdf5
Epoch 00064: loss improved from 50929.99937 to 50850.29676, saving model to models/weights-improvement-64-0.00.hdf5
Epoch 00065: loss improved from 50850.29676 to 50800.08639, saving model to models/weights-improvement-65-0.00.hdf5
Epoch 00066: loss improved from 50800.08639 to 50721.84237, saving model to models/weights-improvement-66-0.00.hdf5
Epoch 00067: loss improved from 50721.84237 to 50618.50945, saving model to models/weights-improvement-67-0.00.hdf5
Epoch 00068: loss did not improve
Epoch 00069: loss did not improve
Epoch 00070: loss did not improve
Epoch 00071: loss improved from 50618.50945 to 50465.49511, saving model to models/weights-improvement-71-0.00.hdf5
Epoch 00072: loss improved from 50465.49511 to 50400.78086, saving model to models/weights-improvement-72-0.00.hdf5
Epoch 00073: loss improved from 50400.78086 to 50297.67801, saving model to models/weights-improvement-73-0.00.hdf5
Epoch 00074: loss improved from 50297.67801 to 50203.59103, saving model to models/weights-improvement-74-0.00.hdf5
Epoch 00075: loss improved from 50203.59103 to 50078.64167, saving model to models/weights-improvement-75-0.00.hdf5
Epoch 00076: loss improved from 50078.64167 to 50017.78119, saving model to models/weights-improvement-76-0.00.hdf5
Epoch 00077: loss improved from 50017.78119 to 49786.92829, saving model to models/weights-improvement-77-0.00.hdf5
Epoch 00078: loss improved from 49786.92829 to 49626.54002, saving model to models/weights-improvement-78-0.00.hdf5
Epoch 00079: loss improved from 49626.54002 to 49569.07411, saving model to models/weights-improvement-79-0.00.hdf5
Epoch 00080: loss did not improve
Epoch 00081: loss did not improve
Epoch 00082: loss improved from 49569.07411 to 49372.05342, saving model to models/weights-improvement-82-0.00.hdf5
Epoch 00083: loss improved from 49372.05342 to 49230.95319, saving model to models/weights-improvement-83-0.00.hdf5
Epoch 00084: loss improved from 49230.95319 to 48993.14354, saving model to models/weights-improvement-84-0.00.hdf5
Epoch 00085: loss improved from 48993.14354 to 48892.52868, saving model to models/weights-improvement-85-0.00.hdf5
Epoch 00086: loss improved from 48892.52868 to 48697.07527, saving model to models/weights-improvement-86-0.00.hdf5
Epoch 00087: loss improved from 48697.07527 to 48629.35127, saving model to models/weights-improvement-87-0.00.hdf5
Epoch 00088: loss improved from 48629.35127 to 48362.96724, saving model to models/weights-improvement-88-0.00.hdf5
Epoch 00089: loss did not improve
Epoch 00090: loss improved from 48362.96724 to 47321.16012, saving model to models/weights-improvement-90-0.00.hdf5
Epoch 00091: loss did not improve
Epoch 00092: loss did not improve
Epoch 00093: loss did not improve
Epoch 00094: loss did not improve
Epoch 00095: loss improved from 47321.16012 to 47272.93272, saving model to models/weights-improvement-95-0.00.hdf5
Epoch 00096: loss improved from 47272.93272 to 47264.82528, saving model to models/weights-improvement-96-0.00.hdf5
Epoch 00097: loss improved from 47264.82528 to 46812.95144, saving model to models/weights-improvement-97-0.00.hdf5
Epoch 00098: loss improved from 46812.95144 to 46745.53158, saving model to models/weights-improvement-98-0.00.hdf5
Epoch 00099: loss did not improve
Epoch 00100: loss did not improve
Epoch 00101: loss did not improve
Epoch 00102: loss improved from 46745.53158 to 46437.02148, saving model to models/weights-improvement-102-0.00.hdf5
Epoch 00103: loss improved from 46437.02148 to 46014.55384, saving model to models/weights-improvement-103-0.00.hdf5
Epoch 00104: loss improved from 46014.55384 to 45964.92865, saving model to models/weights-improvement-104-0.00.hdf5
Epoch 00105: loss improved from 45964.92865 to 45658.61822, saving model to models/weights-improvement-105-0.00.hdf5
Epoch 00106: loss improved from 45658.61822 to 45547.86091, saving model to models/weights-improvement-106-0.00.hdf5
Epoch 00107: loss improved from 45547.86091 to 45161.64024, saving model to models/weights-improvement-107-0.00.hdf5
Epoch 00108: loss improved from 45161.64024 to 45003.43564, saving model to models/weights-improvement-108-0.00.hdf5
Epoch 00109: loss improved from 45003.43564 to 44509.93486, saving model to models/weights-improvement-109-0.00.hdf5
Epoch 00110: loss did not improve
Epoch 00111: loss did not improve
Epoch 00112: loss did not improve
Epoch 00113: loss improved from 44509.93486 to 44271.58077, saving model to models/weights-improvement-113-0.00.hdf5
Epoch 00114: loss improved from 44271.58077 to 44097.82485, saving model to models/weights-improvement-114-0.00.hdf5
Epoch 00115: loss improved from 44097.82485 to 43823.66651, saving model to models/weights-improvement-115-0.00.hdf5
Epoch 00116: loss did not improve
Epoch 00117: loss improved from 43823.66651 to 43340.40762, saving model to models/weights-improvement-117-0.00.hdf5
Epoch 00118: loss did not improve
Epoch 00119: loss improved from 43340.40762 to 42585.79050, saving model to models/weights-improvement-119-0.00.hdf5
Epoch 00120: loss improved from 42585.79050 to 42271.77151, saving model to models/weights-improvement-120-0.00.hdf5
Epoch 00121: loss improved from 42271.77151 to 42084.92931, saving model to models/weights-improvement-121-0.00.hdf5
Epoch 00122: loss did not improve
Epoch 00123: loss improved from 42084.92931 to 39157.14490, saving model to models/weights-improvement-123-0.00.hdf5
Epoch 00124: loss did not improve
Epoch 00125: loss did not improve
Epoch 00126: loss did not improve
Epoch 00127: loss did not improve
Epoch 00128: loss did not improve
Epoch 00129: loss did not improve
Epoch 00130: loss did not improve
Epoch 00131: loss did not improve
Epoch 00132: loss did not improve
Epoch 00133: loss did not improve
Epoch 00134: loss improved from 39157.14490 to 35762.29130, saving model to models/weights-improvement-134-0.00.hdf5
Epoch 00135: loss did not improve
Epoch 00136: loss did not improve
Epoch 00137: loss did not improve
Epoch 00138: loss did not improve
Epoch 00139: loss did not improve
Epoch 00140: loss did not improve
Epoch 00141: loss did not improve
Epoch 00142: loss did not improve
Epoch 00143: loss did not improve
Epoch 00144: loss did not improve
Epoch 00145: loss did not improve
Epoch 00146: loss did not improve
Epoch 00147: loss did not improve
Epoch 00148: loss did not improve
Epoch 00149: loss improved from 35762.29130 to 35519.26107, saving model to models/weights-improvement-149-0.00.hdf5
Epoch 00150: loss did not improve
Epoch 00151: loss did not improve
Epoch 00152: loss did not improve
Epoch 00153: loss did not improve
Epoch 00154: loss did not improve
Epoch 00155: loss did not improve
Epoch 00156: loss improved from 35519.26107 to 35465.71750, saving model to models/weights-improvement-156-0.00.hdf5
Epoch 00157: loss improved from 35465.71750 to 34753.78465, saving model to models/weights-improvement-157-0.00.hdf5
Epoch 00158: loss did not improve
Epoch 00159: loss improved from 34753.78465 to 34093.30492, saving model to models/weights-improvement-159-0.00.hdf5
Epoch 00160: loss did not improve
Epoch 00161: loss improved from 34093.30492 to 33417.83008, saving model to models/weights-improvement-161-0.00.hdf5
Epoch 00162: loss did not improve
Epoch 00163: loss did not improve
Epoch 00164: loss improved from 33417.83008 to 25733.55898, saving model to models/weights-improvement-164-0.00.hdf5
Epoch 00165: loss did not improve
Epoch 00166: loss did not improve
Epoch 00167: loss did not improve
Epoch 00168: loss did not improve
Epoch 00169: loss did not improve
Epoch 00170: loss did not improve
Epoch 00171: loss did not improve
Epoch 00172: loss did not improve
Epoch 00173: loss did not improve
Epoch 00174: loss did not improve
Epoch 00175: loss did not improve
Epoch 00176: loss did not improve
Epoch 00177: loss did not improve
Epoch 00178: loss did not improve
Epoch 00179: loss did not improve
Epoch 00180: loss did not improve
Epoch 00181: loss did not improve
Epoch 00182: loss did not improve
Epoch 00183: loss improved from 25733.55898 to 22910.10574, saving model to models/weights-improvement-183-0.00.hdf5
Epoch 00184: loss did not improve
Epoch 00185: loss did not improve
Epoch 00186: loss did not improve
Epoch 00187: loss did not improve
Epoch 00188: loss did not improve
Epoch 00189: loss did not improve
Epoch 00190: loss did not improve
Epoch 00191: loss did not improve
Epoch 00192: loss did not improve
Epoch 00193: loss did not improve
Epoch 00194: loss did not improve
Epoch 00195: loss did not improve
Epoch 00196: loss improved from 22910.10574 to 16400.94867, saving model to models/weights-improvement-196-0.00.hdf5
Epoch 00197: loss improved from 16400.94867 to 14736.94699, saving model to models/weights-improvement-197-0.00.hdf5
Epoch 00198: loss improved from 14736.94699 to 9009.05851, saving model to models/weights-improvement-198-0.00.hdf5
Epoch 00199: loss did not improve
Epoch 00200: loss improved from 9009.05851 to 8405.84592, saving model to models/weights-improvement-200-0.00.hdf5
Epoch 00201: loss did not improve
Epoch 00202: loss did not improve
Epoch 00203: loss did not improve
Epoch 00204: loss did not improve
Epoch 00205: loss did not improve
Epoch 00206: loss did not improve
Epoch 00207: loss did not improve
Epoch 00208: loss did not improve
Epoch 00209: loss did not improve
Epoch 00210: loss improved from 8405.84592 to 4536.10917, saving model to models/weights-improvement-210-0.00.hdf5
Epoch 00211: loss did not improve
Epoch 00212: loss did not improve
Epoch 00213: loss did not improve
Epoch 00214: loss did not improve
Epoch 00215: loss did not improve
Epoch 00216: loss did not improve
Epoch 00217: loss did not improve
Epoch 00218: loss did not improve
Epoch 00219: loss did not improve
Epoch 00220: loss improved from 4536.10917 to 4358.50454, saving model to models/weights-improvement-220-0.00.hdf5
Epoch 00221: loss did not improve
Epoch 00222: loss did not improve
Epoch 00223: loss did not improve
Epoch 00224: loss did not improve
Epoch 00225: loss did not improve
Epoch 00226: loss did not improve
Epoch 00227: loss did not improve
Epoch 00228: loss did not improve
Epoch 00229: loss did not improve
Epoch 00230: loss did not improve
Epoch 00231: loss did not improve
Epoch 00232: loss did not improve
Epoch 00233: loss did not improve
Epoch 00234: loss did not improve
Epoch 00235: loss did not improve
Epoch 00236: loss did not improve
Epoch 00237: loss did not improve
Epoch 00238: loss did not improve
Epoch 00239: loss did not improve
Epoch 00240: loss did not improve
Epoch 00241: loss did not improve
Epoch 00242: loss did not improve
Epoch 00243: loss did not improve
Epoch 00244: loss did not improve
Epoch 00245: loss did not improve
Epoch 00246: loss did not improve
Epoch 00247: loss did not improve
Epoch 00248: loss did not improve
Epoch 00249: loss did not improve
Epoch 00250: loss did not improve
Epoch 00251: loss did not improve
Epoch 00252: loss did not improve
Epoch 00253: loss did not improve
Epoch 00254: loss did not improve
Epoch 00255: loss did not improve
Epoch 00256: loss did not improve
Epoch 00257: loss did not improve
Epoch 00258: loss did not improve
Epoch 00259: loss did not improve
Epoch 00260: loss did not improve
Epoch 00261: loss did not improve
Epoch 00262: loss did not improve
Epoch 00263: loss did not improve
Epoch 00264: loss did not improve
Epoch 00265: loss did not improve
Epoch 00266: loss did not improve
Epoch 00267: loss did not improve
Epoch 00268: loss did not improve
Epoch 00269: loss did not improve
Epoch 00270: loss did not improve
Epoch 00271: loss did not improve
Epoch 00272: loss did not improve
Epoch 00273: loss did not improve
Epoch 00274: loss did not improve
Epoch 00275: loss did not improve
Epoch 00276: loss did not improve
Epoch 00277: loss did not improve
Epoch 00278: loss did not improve
Epoch 00279: loss did not improve
Epoch 00280: loss did not improve
Epoch 00281: loss did not improve
Epoch 00282: loss did not improve
Epoch 00283: loss did not improve
Epoch 00284: loss did not improve
Epoch 00285: loss did not improve
Epoch 00286: loss did not improve
Epoch 00287: loss did not improve
Epoch 00288: loss did not improve
Epoch 00289: loss did not improve
Epoch 00290: loss did not improve
Epoch 00291: loss did not improve
Epoch 00292: loss did not improve
Epoch 00293: loss did not improve
Epoch 00294: loss did not improve
Epoch 00295: loss did not improve
Epoch 00296: loss did not improve
Epoch 00297: loss did not improve
Epoch 00298: loss did not improve
Epoch 00299: loss did not improve
Epoch 00300: loss did not improve
Epoch 00301: loss did not improve
Epoch 00302: loss did not improve
Epoch 00303: loss did not improve
Epoch 00304: loss did not improve
Epoch 00305: loss did not improve
Epoch 00306: loss did not improve
Epoch 00307: loss did not improve
Epoch 00308: loss did not improve
Epoch 00309: loss did not improve
Epoch 00310: loss did not improve
Epoch 00311: loss did not improve
Epoch 00312: loss did not improve
Epoch 00313: loss did not improve
Epoch 00314: loss did not improve
Epoch 00315: loss did not improve
Epoch 00316: loss did not improve
Epoch 00317: loss did not improve
Epoch 00318: loss did not improve
Epoch 00319: loss did not improve
Epoch 00320: loss did not improve
Epoch 00321: loss did not improve
Epoch 00322: loss did not improve
Epoch 00323: loss did not improve
Epoch 00324: loss did not improve
Epoch 00325: loss did not improve
Epoch 00326: loss did not improve
Epoch 00327: loss did not improve
Epoch 00328: loss did not improve
Epoch 00329: loss did not improve
Epoch 00330: loss did not improve
Epoch 00331: loss did not improve
Epoch 00332: loss did not improve
Epoch 00333: loss did not improve
Epoch 00334: loss did not improve
Epoch 00335: loss did not improve
Epoch 00336: loss did not improve
Epoch 00337: loss did not improve
Epoch 00338: loss did not improve
Epoch 00339: loss did not improve
Epoch 00340: loss did not improve
Epoch 00341: loss did not improve
Epoch 00342: loss did not improve
Epoch 00343: loss did not improve
Epoch 00344: loss did not improve
Epoch 00345: loss did not improve
Epoch 00346: loss did not improve
Epoch 00347: loss did not improve
Epoch 00348: loss did not improve
Epoch 00349: loss did not improve
Epoch 00350: loss did not improve
Epoch 00351: loss did not improve
Epoch 00352: loss did not improve
Epoch 00353: loss did not improve
Epoch 00354: loss did not improve
Epoch 00355: loss did not improve
Epoch 00356: loss did not improve
Epoch 00357: loss did not improve
Epoch 00358: loss did not improve
Epoch 00359: loss did not improve
Epoch 00360: loss did not improve
Epoch 00361: loss did not improve
Epoch 00362: loss did not improve
Epoch 00363: loss did not improve
Epoch 00364: loss did not improve
Epoch 00365: loss did not improve
Epoch 00366: loss did not improve
Epoch 00367: loss improved from 4358.50454 to 4015.25756, saving model to models/weights-improvement-367-0.00.hdf5
Epoch 00368: loss did not improve
Epoch 00369: loss did not improve
Epoch 00370: loss did not improve
Epoch 00371: loss did not improve
Epoch 00372: loss did not improve
Epoch 00373: loss did not improve
Epoch 00374: loss did not improve
Epoch 00375: loss did not improve
Epoch 00376: loss did not improve
Epoch 00377: loss did not improve
Epoch 00378: loss did not improve
Epoch 00379: loss did not improve
Epoch 00380: loss did not improve
Epoch 00381: loss did not improve
Epoch 00382: loss did not improve
Epoch 00383: loss did not improve
Epoch 00384: loss did not improve
Epoch 00385: loss did not improve
Epoch 00386: loss did not improve
Epoch 00387: loss did not improve
Epoch 00388: loss did not improve
Epoch 00389: loss improved from 4015.25756 to 3966.60368, saving model to models/weights-improvement-389-0.00.hdf5
Epoch 00390: loss did not improve
Epoch 00391: loss did not improve
Epoch 00392: loss did not improve
Epoch 00393: loss did not improve
Epoch 00394: loss did not improve
Epoch 00395: loss did not improve
Epoch 00396: loss did not improve
Epoch 00397: loss did not improve
Epoch 00398: loss did not improve
Epoch 00399: loss improved from 3966.60368 to 3538.82919, saving model to models/weights-improvement-399-0.00.hdf5
Epoch 00400: loss improved from 3538.82919 to 3533.29304, saving model to models/weights-improvement-400-0.00.hdf5
Epoch 00401: loss improved from 3533.29304 to 3241.42275, saving model to models/weights-improvement-401-0.00.hdf5
Epoch 00402: loss did not improve
Epoch 00403: loss did not improve
Epoch 00404: loss did not improve
Epoch 00405: loss did not improve
Epoch 00406: loss did not improve
Epoch 00407: loss did not improve
Epoch 00408: loss did not improve
Epoch 00409: loss did not improve
Epoch 00410: loss did not improve
Epoch 00411: loss did not improve
Epoch 00412: loss did not improve
Epoch 00413: loss did not improve
Epoch 00414: loss did not improve
Epoch 00415: loss did not improve
Epoch 00416: loss did not improve
Epoch 00417: loss did not improve
Epoch 00418: loss did not improve
Epoch 00419: loss did not improve
Epoch 00420: loss did not improve
Epoch 00421: loss did not improve
Epoch 00422: loss did not improve
Epoch 00423: loss did not improve
Epoch 00424: loss did not improve
Epoch 00425: loss did not improve
Epoch 00426: loss did not improve
Epoch 00427: loss did not improve
Epoch 00428: loss did not improve
Epoch 00429: loss did not improve
Epoch 00430: loss did not improve
Epoch 00431: loss did not improve
Epoch 00432: loss did not improve
Epoch 00433: loss did not improve
Epoch 00434: loss did not improve
Epoch 00435: loss did not improve
Epoch 00436: loss did not improve
Epoch 00437: loss did not improve
Epoch 00438: loss did not improve
Epoch 00439: loss did not improve
Epoch 00440: loss did not improve
Epoch 00441: loss did not improve
Epoch 00442: loss did not improve
Epoch 00443: loss did not improve
Epoch 00444: loss did not improve
Epoch 00445: loss did not improve
Epoch 00446: loss did not improve
Epoch 00447: loss did not improve
Epoch 00448: loss did not improve
Epoch 00449: loss did not improve
Epoch 00450: loss did not improve
Epoch 00451: loss did not improve
Epoch 00452: loss did not improve
Epoch 00453: loss did not improve
Epoch 00454: loss did not improve
Epoch 00455: loss did not improve
Epoch 00456: loss did not improve
Epoch 00457: loss did not improve
Epoch 00458: loss did not improve
Epoch 00459: loss did not improve
Epoch 00460: loss did not improve
Epoch 00461: loss did not improve
Epoch 00462: loss did not improve
Epoch 00463: loss did not improve
Epoch 00464: loss did not improve
Epoch 00465: loss did not improve
Epoch 00466: loss did not improve
Epoch 00467: loss did not improve
Epoch 00468: loss did not improve
Epoch 00469: loss did not improve
Epoch 00470: loss did not improve
Epoch 00471: loss did not improve
Epoch 00472: loss did not improve
Epoch 00473: loss did not improve
Epoch 00474: loss did not improve
Epoch 00475: loss did not improve
Epoch 00476: loss did not improve
Epoch 00477: loss did not improve
Epoch 00478: loss did not improve
Epoch 00479: loss did not improve
Epoch 00480: loss did not improve
Epoch 00481: loss did not improve
Epoch 00482: loss did not improve
Epoch 00483: loss did not improve
Epoch 00484: loss did not improve
Epoch 00485: loss did not improve
Epoch 00486: loss did not improve
Epoch 00487: loss did not improve
Epoch 00488: loss did not improve
Epoch 00489: loss did not improve
Epoch 00490: loss did not improve
Epoch 00491: loss did not improve
Epoch 00492: loss did not improve
Epoch 00493: loss did not improve
Epoch 00494: loss did not improve
Epoch 00495: loss did not improve
Epoch 00496: loss did not improve
Epoch 00497: loss did not improve
Epoch 00498: loss did not improve
Epoch 00499: loss did not improve
Epoch 00500: loss did not improve
Epoch 00501: loss did not improve
Epoch 00502: loss did not improve
Epoch 00503: loss did not improve
Epoch 00504: loss did not improve
Epoch 00505: loss did not improve
Epoch 00506: loss did not improve
Epoch 00507: loss did not improve
Epoch 00508: loss did not improve
Epoch 00509: loss did not improve
Epoch 00510: loss did not improve
Epoch 00511: loss did not improve
Epoch 00512: loss did not improve
Epoch 00513: loss did not improve
Epoch 00514: loss did not improve
Epoch 00515: loss did not improve
Epoch 00516: loss did not improve
Epoch 00517: loss did not improve
Epoch 00518: loss did not improve
Epoch 00519: loss did not improve
Epoch 00520: loss did not improve
Epoch 00521: loss did not improve
Epoch 00522: loss did not improve
Epoch 00523: loss did not improve
Epoch 00524: loss did not improve
Epoch 00525: loss did not improve
Epoch 00526: loss did not improve
Epoch 00527: loss did not improve
Epoch 00528: loss did not improve
Epoch 00529: loss did not improve
Epoch 00530: loss did not improve
Epoch 00531: loss did not improve
Epoch 00532: loss did not improve
Epoch 00533: loss did not improve
Epoch 00534: loss did not improve
Epoch 00535: loss did not improve
Epoch 00536: loss did not improve
Epoch 00537: loss did not improve
Epoch 00538: loss did not improve
Epoch 00539: loss did not improve
Epoch 00540: loss did not improve
Epoch 00541: loss did not improve
Epoch 00542: loss did not improve
Epoch 00543: loss did not improve
Epoch 00544: loss did not improve
Epoch 00545: loss did not improve
Epoch 00546: loss did not improve
Epoch 00547: loss did not improve
Epoch 00548: loss did not improve
Epoch 00549: loss did not improve
Epoch 00550: loss did not improve
Epoch 00551: loss did not improve
Epoch 00552: loss did not improve
Epoch 00553: loss did not improve
Epoch 00554: loss did not improve
Epoch 00555: loss did not improve
Epoch 00556: loss did not improve
Epoch 00557: loss did not improve
Epoch 00558: loss did not improve
Epoch 00559: loss did not improve
Epoch 00560: loss did not improve
Epoch 00561: loss did not improve
Epoch 00562: loss did not improve
Epoch 00563: loss did not improve
Epoch 00564: loss did not improve
Epoch 00565: loss did not improve
Epoch 00566: loss did not improve
Epoch 00567: loss did not improve
Epoch 00568: loss did not improve
Epoch 00569: loss did not improve
Epoch 00570: loss did not improve
Epoch 00571: loss did not improve
Epoch 00572: loss did not improve
Epoch 00573: loss did not improve
Epoch 00574: loss did not improve
Epoch 00575: loss did not improve
Epoch 00576: loss did not improve
Epoch 00577: loss did not improve
Epoch 00578: loss did not improve
Epoch 00579: loss did not improve
Epoch 00580: loss did not improve
Epoch 00581: loss did not improve
Epoch 00582: loss did not improve
Epoch 00583: loss did not improve
Epoch 00584: loss did not improve
Epoch 00585: loss did not improve
Epoch 00586: loss did not improve
Epoch 00587: loss did not improve
Epoch 00588: loss did not improve
Epoch 00589: loss did not improve
Epoch 00590: loss did not improve
Epoch 00591: loss did not improve
Epoch 00592: loss did not improve
Epoch 00593: loss did not improve
Epoch 00594: loss did not improve
Epoch 00595: loss did not improve
Epoch 00596: loss did not improve
Epoch 00597: loss did not improve
Epoch 00598: loss did not improve
Epoch 00599: loss did not improve
Epoch 00600: loss did not improve
Epoch 00601: loss did not improve
Epoch 00602: loss did not improve
Epoch 00603: loss did not improve
Epoch 00604: loss did not improve
Epoch 00605: loss did not improve
Epoch 00606: loss did not improve
Epoch 00607: loss did not improve
Epoch 00608: loss did not improve
Epoch 00609: loss did not improve
Epoch 00610: loss did not improve
Epoch 00611: loss did not improve
Epoch 00612: loss did not improve
Epoch 00613: loss did not improve
Epoch 00614: loss did not improve
Epoch 00615: loss did not improve
Epoch 00616: loss did not improve
Epoch 00617: loss did not improve
Epoch 00618: loss did not improve
Epoch 00619: loss did not improve
Epoch 00620: loss did not improve
Epoch 00621: loss did not improve
Epoch 00622: loss did not improve
Epoch 00623: loss did not improve
Epoch 00624: loss did not improve
Epoch 00625: loss did not improve
Epoch 00626: loss did not improve
Epoch 00627: loss did not improve
Epoch 00628: loss did not improve
Epoch 00629: loss did not improve
Epoch 00630: loss did not improve
Epoch 00631: loss did not improve
Epoch 00632: loss did not improve
Epoch 00633: loss did not improve
Epoch 00634: loss did not improve
Epoch 00635: loss did not improve
Epoch 00636: loss did not improve
Epoch 00637: loss did not improve
Epoch 00638: loss did not improve
Epoch 00639: loss did not improve
Epoch 00640: loss did not improve
Epoch 00641: loss did not improve
Epoch 00642: loss did not improve
Epoch 00643: loss did not improve
Epoch 00644: loss did not improve
Epoch 00645: loss did not improve
Epoch 00646: loss did not improve
Epoch 00647: loss did not improve
Epoch 00648: loss did not improve
Epoch 00649: loss did not improve
Epoch 00650: loss did not improve
Epoch 00651: loss did not improve
Epoch 00652: loss did not improve
Epoch 00653: loss did not improve
Epoch 00654: loss did not improve
Epoch 00655: loss did not improve
Epoch 00656: loss did not improve
Epoch 00657: loss did not improve
Epoch 00658: loss did not improve
Epoch 00659: loss did not improve
Epoch 00660: loss did not improve
Epoch 00661: loss did not improve
Epoch 00662: loss did not improve
Epoch 00663: loss did not improve
Epoch 00664: loss did not improve
Epoch 00665: loss did not improve
Epoch 00666: loss did not improve
Epoch 00667: loss did not improve
Epoch 00668: loss did not improve
Epoch 00669: loss did not improve
Epoch 00670: loss did not improve
Epoch 00671: loss did not improve
Epoch 00672: loss did not improve
Epoch 00673: loss did not improve
Epoch 00674: loss did not improve
Epoch 00675: loss did not improve
Epoch 00676: loss did not improve
Epoch 00677: loss did not improve
Epoch 00678: loss did not improve
Epoch 00679: loss did not improve
Epoch 00680: loss did not improve
Epoch 00681: loss did not improve
Epoch 00682: loss did not improve
Epoch 00683: loss did not improve
Epoch 00684: loss did not improve
Epoch 00685: loss did not improve
Epoch 00686: loss did not improve
Epoch 00687: loss did not improve
Epoch 00688: loss did not improve
Epoch 00689: loss did not improve
Epoch 00690: loss did not improve
Epoch 00691: loss did not improve
Epoch 00692: loss did not improve
Epoch 00693: loss did not improve
Epoch 00694: loss did not improve
Epoch 00695: loss did not improve
Epoch 00696: loss did not improve
Epoch 00697: loss did not improve
Epoch 00698: loss did not improve
Epoch 00699: loss did not improve
Epoch 00700: loss did not improve
Epoch 00701: loss did not improve
Epoch 00702: loss did not improve
Epoch 00703: loss did not improve
Epoch 00704: loss did not improve
Epoch 00705: loss did not improve
Epoch 00706: loss did not improve
Epoch 00707: loss did not improve
Epoch 00708: loss did not improve
Epoch 00709: loss did not improve
Epoch 00710: loss did not improve
Epoch 00711: loss did not improve
Epoch 00712: loss did not improve
Epoch 00713: loss did not improve
Epoch 00714: loss did not improve
Epoch 00715: loss did not improve
Epoch 00716: loss did not improve
Epoch 00717: loss did not improve
Epoch 00718: loss did not improve
Epoch 00719: loss did not improve
Epoch 00720: loss did not improve
Epoch 00721: loss did not improve
Epoch 00722: loss did not improve
Epoch 00723: loss did not improve
Epoch 00724: loss did not improve
Epoch 00725: loss did not improve
Epoch 00726: loss did not improve
Epoch 00727: loss did not improve
Epoch 00728: loss did not improve
Epoch 00729: loss did not improve
Epoch 00730: loss did not improve
Epoch 00731: loss did not improve
Epoch 00732: loss did not improve
Epoch 00733: loss did not improve
Epoch 00734: loss did not improve
Epoch 00735: loss did not improve
Epoch 00736: loss did not improve
Epoch 00737: loss did not improve
Epoch 00738: loss did not improve
Epoch 00739: loss did not improve
Epoch 00740: loss did not improve
Epoch 00741: loss did not improve
Epoch 00742: loss did not improve
Epoch 00743: loss did not improve
Epoch 00744: loss did not improve
Epoch 00745: loss did not improve
Epoch 00746: loss did not improve
Epoch 00747: loss did not improve
Epoch 00748: loss did not improve
Epoch 00749: loss did not improve
Epoch 00750: loss did not improve
Epoch 00751: loss did not improve
Epoch 00752: loss did not improve
Epoch 00753: loss did not improve
Epoch 00754: loss did not improve
Epoch 00755: loss did not improve
Epoch 00756: loss did not improve
Epoch 00757: loss did not improve
Epoch 00758: loss did not improve
Epoch 00759: loss did not improve
Epoch 00760: loss did not improve
Epoch 00761: loss did not improve
Epoch 00762: loss did not improve
Epoch 00763: loss did not improve
Epoch 00764: loss did not improve
Epoch 00765: loss did not improve
Epoch 00766: loss did not improve
Epoch 00767: loss did not improve
Epoch 00768: loss did not improve
Epoch 00769: loss did not improve
Epoch 00770: loss did not improve
Epoch 00771: loss did not improve
Epoch 00772: loss did not improve
Epoch 00773: loss did not improve
Epoch 00774: loss did not improve
Epoch 00775: loss did not improve
Epoch 00776: loss did not improve
Epoch 00777: loss did not improve
Epoch 00778: loss did not improve
Epoch 00779: loss did not improve
Epoch 00780: loss did not improve
Epoch 00781: loss did not improve
Epoch 00782: loss did not improve
Epoch 00783: loss did not improve
Epoch 00784: loss did not improve
Epoch 00785: loss did not improve
Epoch 00786: loss did not improve
Epoch 00787: loss did not improve
Epoch 00788: loss did not improve
Epoch 00789: loss did not improve
Epoch 00790: loss did not improve
Epoch 00791: loss did not improve
Epoch 00792: loss did not improve
Epoch 00793: loss did not improve
Epoch 00794: loss did not improve
Epoch 00795: loss did not improve
Epoch 00796: loss did not improve
Epoch 00797: loss did not improve
Epoch 00798: loss did not improve
Epoch 00799: loss did not improve
Epoch 00800: loss did not improve
Epoch 00801: loss did not improve
Epoch 00802: loss did not improve
Epoch 00803: loss did not improve
Epoch 00804: loss did not improve
Epoch 00805: loss did not improve
Epoch 00806: loss did not improve
Epoch 00807: loss did not improve
Epoch 00808: loss did not improve
Epoch 00809: loss did not improve
Epoch 00810: loss did not improve
Epoch 00811: loss did not improve
Epoch 00812: loss did not improve
Epoch 00813: loss did not improve
Epoch 00814: loss did not improve
Epoch 00815: loss did not improve
Epoch 00816: loss did not improve
Epoch 00817: loss did not improve
Epoch 00818: loss did not improve
Epoch 00819: loss did not improve
Epoch 00820: loss did not improve
Epoch 00821: loss did not improve
Epoch 00822: loss did not improve
Epoch 00823: loss did not improve
Epoch 00824: loss did not improve
Epoch 00825: loss did not improve
Epoch 00826: loss did not improve
Epoch 00827: loss did not improve
Epoch 00828: loss did not improve
Epoch 00829: loss did not improve
Epoch 00830: loss did not improve
Epoch 00831: loss did not improve
Epoch 00832: loss did not improve
Epoch 00833: loss did not improve
Epoch 00834: loss did not improve
Epoch 00835: loss did not improve
Epoch 00836: loss did not improve
Epoch 00837: loss did not improve
Epoch 00838: loss did not improve
Epoch 00839: loss did not improve
Epoch 00840: loss did not improve
Epoch 00841: loss did not improve
Epoch 00842: loss did not improve
Epoch 00843: loss did not improve
Epoch 00844: loss did not improve
Epoch 00845: loss did not improve
Epoch 00846: loss did not improve
Epoch 00847: loss did not improve
Epoch 00848: loss did not improve
Epoch 00849: loss did not improve
Epoch 00850: loss did not improve
Epoch 00851: loss did not improve
Epoch 00852: loss did not improve
Epoch 00853: loss did not improve
Epoch 00854: loss did not improve
Epoch 00855: loss did not improve
Epoch 00856: loss did not improve
Epoch 00857: loss did not improve
Epoch 00858: loss did not improve
Epoch 00859: loss did not improve
Epoch 00860: loss did not improve
Epoch 00861: loss did not improve
Epoch 00862: loss did not improve
Epoch 00863: loss did not improve
Epoch 00864: loss improved from 3241.42275 to 3158.41546, saving model to models/weights-improvement-864-0.00.hdf5
Epoch 00865: loss did not improve
Epoch 00866: loss did not improve
Epoch 00867: loss did not improve
Epoch 00868: loss did not improve
Epoch 00869: loss did not improve
Epoch 00870: loss did not improve
Epoch 00871: loss did not improve
Epoch 00872: loss did not improve
Epoch 00873: loss did not improve
Epoch 00874: loss did not improve
Epoch 00875: loss did not improve
Epoch 00876: loss did not improve
Epoch 00877: loss did not improve
Epoch 00878: loss did not improve
Epoch 00879: loss did not improve
Epoch 00880: loss did not improve
Epoch 00881: loss did not improve
Epoch 00882: loss did not improve
Epoch 00883: loss did not improve
Epoch 00884: loss did not improve
Epoch 00885: loss did not improve
Epoch 00886: loss did not improve
Epoch 00887: loss did not improve
Epoch 00888: loss did not improve
Epoch 00889: loss did not improve
Epoch 00890: loss did not improve
Epoch 00891: loss did not improve
Epoch 00892: loss did not improve
Epoch 00893: loss did not improve
Epoch 00894: loss did not improve
Epoch 00895: loss did not improve
Epoch 00896: loss did not improve
Epoch 00897: loss did not improve
Epoch 00898: loss did not improve
Epoch 00899: loss did not improve
Epoch 00900: loss did not improve
Epoch 00901: loss did not improve
Epoch 00902: loss did not improve
Epoch 00903: loss did not improve
Epoch 00904: loss improved from 3158.41546 to 3096.65460, saving model to models/weights-improvement-904-0.00.hdf5
Epoch 00905: loss did not improve
Epoch 00906: loss did not improve
Epoch 00907: loss did not improve
Epoch 00908: loss did not improve
Epoch 00909: loss did not improve
Epoch 00910: loss did not improve
Epoch 00911: loss did not improve
Epoch 00912: loss did not improve
Epoch 00913: loss did not improve
Epoch 00914: loss did not improve
Epoch 00915: loss did not improve
Epoch 00916: loss did not improve
Epoch 00917: loss did not improve
Epoch 00918: loss did not improve
Epoch 00919: loss did not improve
Epoch 00920: loss did not improve
Epoch 00921: loss did not improve
Epoch 00922: loss did not improve
Epoch 00923: loss did not improve
Epoch 00924: loss did not improve
Epoch 00925: loss did not improve
Epoch 00926: loss improved from 3096.65460 to 3075.67240, saving model to models/weights-improvement-926-0.00.hdf5
Epoch 00927: loss did not improve
Epoch 00928: loss did not improve
Epoch 00929: loss did not improve
Epoch 00930: loss did not improve
Epoch 00931: loss did not improve
Epoch 00932: loss did not improve
Epoch 00933: loss did not improve
Epoch 00934: loss did not improve
Epoch 00935: loss did not improve
Epoch 00936: loss did not improve
Epoch 00937: loss did not improve
Epoch 00938: loss did not improve
Epoch 00939: loss did not improve
Epoch 00940: loss did not improve
Epoch 00941: loss did not improve
Epoch 00942: loss did not improve
Epoch 00943: loss did not improve
Epoch 00944: loss did not improve
Epoch 00945: loss did not improve
Epoch 00946: loss did not improve
Epoch 00947: loss did not improve
Epoch 00948: loss did not improve
Epoch 00949: loss did not improve
Epoch 00950: loss did not improve
Epoch 00951: loss did not improve
Epoch 00952: loss did not improve
Epoch 00953: loss did not improve
Epoch 00954: loss did not improve
Epoch 00955: loss did not improve
Epoch 00956: loss did not improve
Epoch 00957: loss did not improve
Epoch 00958: loss did not improve
Epoch 00959: loss did not improve
Epoch 00960: loss did not improve
Epoch 00961: loss did not improve
Epoch 00962: loss did not improve
Epoch 00963: loss did not improve
Epoch 00964: loss did not improve
Epoch 00965: loss did not improve
Epoch 00966: loss did not improve
Epoch 00967: loss did not improve
Epoch 00968: loss did not improve
Epoch 00969: loss did not improve
Epoch 00970: loss did not improve
Epoch 00971: loss did not improve
Epoch 00972: loss did not improve
Epoch 00973: loss did not improve
Epoch 00974: loss did not improve
Epoch 00975: loss improved from 3075.67240 to 2024.22799, saving model to models/weights-improvement-975-0.00.hdf5
Epoch 00976: loss did not improve
Epoch 00977: loss did not improve
Epoch 00978: loss did not improve
Epoch 00979: loss did not improve
Epoch 00980: loss did not improve
Epoch 00981: loss did not improve
Epoch 00982: loss did not improve
Epoch 00983: loss did not improve
Epoch 00984: loss did not improve
Epoch 00985: loss did not improve
Epoch 00986: loss did not improve
Epoch 00987: loss did not improve
Epoch 00988: loss did not improve
Epoch 00989: loss did not improve
Epoch 00990: loss did not improve
Epoch 00991: loss did not improve
Epoch 00992: loss did not improve
Epoch 00993: loss did not improve
Epoch 00994: loss did not improve
Epoch 00995: loss did not improve
Epoch 00996: loss did not improve
Epoch 00997: loss did not improve
Epoch 00998: loss did not improve
Epoch 00999: loss did not improve
Epoch 01000: loss did not improve
Out[348]:
<keras.callbacks.History at 0x7f4493a09b70>
In [87]:
#model_death_pred.fit(data_x,data_y_deaths,validation_split=0.001, epochs=1000,batch_size=1,shuffle=False, callbacks=callbacks_list2,verbose=0)
Epoch 00001: loss improved from inf to 1254.53810, saving model to models2/weights-improvement-01-0.00.hdf5
Epoch 00002: loss did not improve
Epoch 00003: loss did not improve
Epoch 00004: loss did not improve
Epoch 00005: loss did not improve
Epoch 00006: loss did not improve
Epoch 00007: loss did not improve
Epoch 00008: loss did not improve
Epoch 00009: loss did not improve
Epoch 00010: loss did not improve
Epoch 00011: loss did not improve
Epoch 00012: loss did not improve
Epoch 00013: loss did not improve
Epoch 00014: loss did not improve
Epoch 00015: loss did not improve
Epoch 00016: loss did not improve
Epoch 00017: loss did not improve
Epoch 00018: loss did not improve
Epoch 00019: loss did not improve
Epoch 00020: loss did not improve
Epoch 00021: loss did not improve
Epoch 00022: loss did not improve
Epoch 00023: loss did not improve
Epoch 00024: loss did not improve
Epoch 00025: loss did not improve
Epoch 00026: loss did not improve
Epoch 00027: loss did not improve
Epoch 00028: loss did not improve
Epoch 00029: loss did not improve
Epoch 00030: loss did not improve
Epoch 00031: loss did not improve
Epoch 00032: loss did not improve
Epoch 00033: loss did not improve
Epoch 00034: loss did not improve
Epoch 00035: loss did not improve
Epoch 00036: loss did not improve
Epoch 00037: loss did not improve
Epoch 00038: loss did not improve
Epoch 00039: loss did not improve
Epoch 00040: loss did not improve
Epoch 00041: loss did not improve
Epoch 00042: loss did not improve
Epoch 00043: loss did not improve
Epoch 00044: loss did not improve
Epoch 00045: loss did not improve
Epoch 00046: loss did not improve
Epoch 00047: loss did not improve
Epoch 00048: loss did not improve
Epoch 00049: loss did not improve
Epoch 00050: loss did not improve
Epoch 00051: loss did not improve
Epoch 00052: loss did not improve
Epoch 00053: loss did not improve
Epoch 00054: loss did not improve
Epoch 00055: loss did not improve
Epoch 00056: loss did not improve
Epoch 00057: loss did not improve
Epoch 00058: loss did not improve
Epoch 00059: loss did not improve
Epoch 00060: loss did not improve
Epoch 00061: loss did not improve
Epoch 00062: loss did not improve
Epoch 00063: loss did not improve
Epoch 00064: loss did not improve
Epoch 00065: loss did not improve
Epoch 00066: loss did not improve
Epoch 00067: loss did not improve
Epoch 00068: loss did not improve
Epoch 00069: loss did not improve
Epoch 00070: loss did not improve
Epoch 00071: loss did not improve
Epoch 00072: loss did not improve
Epoch 00073: loss did not improve
Epoch 00074: loss did not improve
Epoch 00075: loss did not improve
Epoch 00076: loss did not improve
Epoch 00077: loss did not improve
Epoch 00078: loss did not improve
Epoch 00079: loss did not improve
Epoch 00080: loss did not improve
Epoch 00081: loss did not improve
Epoch 00082: loss did not improve
Epoch 00083: loss did not improve
Epoch 00084: loss did not improve
Epoch 00085: loss did not improve
Epoch 00086: loss did not improve
Epoch 00087: loss did not improve
Epoch 00088: loss did not improve
Epoch 00089: loss did not improve
Epoch 00090: loss did not improve
Epoch 00091: loss did not improve
Epoch 00092: loss did not improve
Epoch 00093: loss did not improve
Epoch 00094: loss did not improve
Epoch 00095: loss did not improve
Epoch 00096: loss did not improve
Epoch 00097: loss did not improve
Epoch 00098: loss did not improve
Epoch 00099: loss did not improve
Epoch 00100: loss did not improve
Epoch 00101: loss did not improve
Epoch 00102: loss did not improve
Epoch 00103: loss did not improve
Epoch 00104: loss did not improve
Epoch 00105: loss did not improve
Epoch 00106: loss did not improve
Epoch 00107: loss did not improve
Epoch 00108: loss did not improve
Epoch 00109: loss did not improve
Epoch 00110: loss did not improve
Epoch 00111: loss did not improve
Epoch 00112: loss did not improve
Epoch 00113: loss did not improve
Epoch 00114: loss did not improve
Epoch 00115: loss did not improve
Epoch 00116: loss did not improve
Epoch 00117: loss improved from 1254.53810 to 1184.77188, saving model to models2/weights-improvement-117-0.00.hdf5
Epoch 00118: loss did not improve
Epoch 00119: loss did not improve
Epoch 00120: loss did not improve
Epoch 00121: loss did not improve
Epoch 00122: loss did not improve
Epoch 00123: loss did not improve
Epoch 00124: loss did not improve
Epoch 00125: loss did not improve
Epoch 00126: loss did not improve
Epoch 00127: loss did not improve
Epoch 00128: loss did not improve
Epoch 00129: loss improved from 1184.77188 to 1135.81794, saving model to models2/weights-improvement-129-0.00.hdf5
Epoch 00130: loss did not improve
Epoch 00131: loss did not improve
Epoch 00132: loss did not improve
Epoch 00133: loss did not improve
Epoch 00134: loss did not improve
Epoch 00135: loss did not improve
Epoch 00136: loss did not improve
Epoch 00137: loss did not improve
Epoch 00138: loss did not improve
Epoch 00139: loss did not improve
Epoch 00140: loss did not improve
Epoch 00141: loss did not improve
Epoch 00142: loss did not improve
Epoch 00143: loss did not improve
Epoch 00144: loss did not improve
Epoch 00145: loss did not improve
Epoch 00146: loss did not improve
Epoch 00147: loss did not improve
Epoch 00148: loss did not improve
Epoch 00149: loss did not improve
Epoch 00150: loss did not improve
Epoch 00151: loss did not improve
Epoch 00152: loss did not improve
Epoch 00153: loss did not improve
Epoch 00154: loss did not improve
Epoch 00155: loss did not improve
Epoch 00156: loss improved from 1135.81794 to 1095.42210, saving model to models2/weights-improvement-156-0.00.hdf5
Epoch 00157: loss did not improve
Epoch 00158: loss did not improve
Epoch 00159: loss did not improve
Epoch 00160: loss did not improve
Epoch 00161: loss did not improve
Epoch 00162: loss did not improve
Epoch 00163: loss did not improve
Epoch 00164: loss did not improve
Epoch 00165: loss did not improve
Epoch 00166: loss did not improve
Epoch 00167: loss did not improve
Epoch 00168: loss improved from 1095.42210 to 1068.75130, saving model to models2/weights-improvement-168-0.00.hdf5
Epoch 00169: loss did not improve
Epoch 00170: loss did not improve
Epoch 00171: loss did not improve
Epoch 00172: loss did not improve
Epoch 00173: loss improved from 1068.75130 to 735.50273, saving model to models2/weights-improvement-173-0.00.hdf5
Epoch 00174: loss did not improve
Epoch 00175: loss did not improve
Epoch 00176: loss did not improve
Epoch 00177: loss did not improve
Epoch 00178: loss did not improve
Epoch 00179: loss did not improve
Epoch 00180: loss did not improve
Epoch 00181: loss improved from 735.50273 to 719.83246, saving model to models2/weights-improvement-181-0.00.hdf5
Epoch 00182: loss did not improve
Epoch 00183: loss improved from 719.83246 to 465.97012, saving model to models2/weights-improvement-183-0.00.hdf5
Epoch 00184: loss did not improve
Epoch 00185: loss did not improve
Epoch 00186: loss did not improve
Epoch 00187: loss did not improve
Epoch 00188: loss did not improve
Epoch 00189: loss did not improve
Epoch 00190: loss did not improve
Epoch 00191: loss did not improve
Epoch 00192: loss did not improve
Epoch 00193: loss did not improve
Epoch 00194: loss did not improve
Epoch 00195: loss did not improve
Epoch 00196: loss did not improve
Epoch 00197: loss improved from 465.97012 to 374.87892, saving model to models2/weights-improvement-197-0.00.hdf5
Epoch 00198: loss improved from 374.87892 to 200.26099, saving model to models2/weights-improvement-198-0.00.hdf5
Epoch 00199: loss did not improve
Epoch 00200: loss did not improve
Epoch 00201: loss did not improve
Epoch 00202: loss did not improve
Epoch 00203: loss did not improve
Epoch 00204: loss did not improve
Epoch 00205: loss did not improve
Epoch 00206: loss did not improve
Epoch 00207: loss did not improve
Epoch 00208: loss did not improve
Epoch 00209: loss did not improve
Epoch 00210: loss did not improve
Epoch 00211: loss did not improve
Epoch 00212: loss did not improve
Epoch 00213: loss did not improve
Epoch 00214: loss did not improve
Epoch 00215: loss did not improve
Epoch 00216: loss improved from 200.26099 to 150.25057, saving model to models2/weights-improvement-216-0.00.hdf5
Epoch 00217: loss did not improve
Epoch 00218: loss did not improve
Epoch 00219: loss did not improve
Epoch 00220: loss did not improve
Epoch 00221: loss did not improve
Epoch 00222: loss did not improve
Epoch 00223: loss did not improve
Epoch 00224: loss did not improve
Epoch 00225: loss did not improve
Epoch 00226: loss did not improve
Epoch 00227: loss did not improve
Epoch 00228: loss did not improve
Epoch 00229: loss did not improve
Epoch 00230: loss did not improve
Epoch 00231: loss did not improve
Epoch 00232: loss did not improve
Epoch 00233: loss did not improve
Epoch 00234: loss did not improve
Epoch 00235: loss did not improve
Epoch 00236: loss did not improve
Epoch 00237: loss did not improve
Epoch 00238: loss did not improve
Epoch 00239: loss did not improve
Epoch 00240: loss did not improve
Epoch 00241: loss did not improve
Epoch 00242: loss did not improve
Epoch 00243: loss did not improve
Epoch 00244: loss did not improve
Epoch 00245: loss did not improve
Epoch 00246: loss did not improve
Epoch 00247: loss did not improve
Epoch 00248: loss did not improve
Epoch 00249: loss did not improve
Epoch 00250: loss did not improve
Epoch 00251: loss did not improve
Epoch 00252: loss did not improve
Epoch 00253: loss did not improve
Epoch 00254: loss did not improve
Epoch 00255: loss did not improve
Epoch 00256: loss did not improve
Epoch 00257: loss did not improve
Epoch 00258: loss did not improve
Epoch 00259: loss did not improve
Epoch 00260: loss did not improve
Epoch 00261: loss did not improve
Epoch 00262: loss did not improve
Epoch 00263: loss did not improve
Epoch 00264: loss did not improve
Epoch 00265: loss did not improve
Epoch 00266: loss did not improve
Epoch 00267: loss did not improve
Epoch 00268: loss did not improve
Epoch 00269: loss did not improve
Epoch 00270: loss did not improve
Epoch 00271: loss did not improve
Epoch 00272: loss did not improve
Epoch 00273: loss did not improve
Epoch 00274: loss did not improve
Epoch 00275: loss did not improve
Epoch 00276: loss did not improve
Epoch 00277: loss did not improve
Epoch 00278: loss did not improve
Epoch 00279: loss did not improve
Epoch 00280: loss did not improve
Epoch 00281: loss did not improve
Epoch 00282: loss did not improve
Epoch 00283: loss did not improve
Epoch 00284: loss did not improve
Epoch 00285: loss did not improve
Epoch 00286: loss did not improve
Epoch 00287: loss did not improve
Epoch 00288: loss did not improve
Epoch 00289: loss did not improve
Epoch 00290: loss did not improve
Epoch 00291: loss did not improve
Epoch 00292: loss did not improve
Epoch 00293: loss did not improve
Epoch 00294: loss did not improve
Epoch 00295: loss did not improve
Epoch 00296: loss did not improve
Epoch 00297: loss did not improve
Epoch 00298: loss did not improve
Epoch 00299: loss did not improve
Epoch 00300: loss did not improve
Epoch 00301: loss did not improve
Epoch 00302: loss did not improve
Epoch 00303: loss did not improve
Epoch 00304: loss did not improve
Epoch 00305: loss did not improve
Epoch 00306: loss did not improve
Epoch 00307: loss did not improve
Epoch 00308: loss did not improve
Epoch 00309: loss did not improve
Epoch 00310: loss did not improve
Epoch 00311: loss did not improve
Epoch 00312: loss did not improve
Epoch 00313: loss did not improve
Epoch 00314: loss did not improve
Epoch 00315: loss did not improve
Epoch 00316: loss did not improve
Epoch 00317: loss did not improve
Epoch 00318: loss did not improve
Epoch 00319: loss did not improve
Epoch 00320: loss did not improve
Epoch 00321: loss did not improve
Epoch 00322: loss did not improve
Epoch 00323: loss improved from 150.25057 to 148.73660, saving model to models2/weights-improvement-323-0.00.hdf5
Epoch 00324: loss did not improve
Epoch 00325: loss did not improve
Epoch 00326: loss did not improve
Epoch 00327: loss did not improve
Epoch 00328: loss did not improve
Epoch 00329: loss did not improve
Epoch 00330: loss did not improve
Epoch 00331: loss did not improve
Epoch 00332: loss did not improve
Epoch 00333: loss did not improve
Epoch 00334: loss did not improve
Epoch 00335: loss did not improve
Epoch 00336: loss did not improve
Epoch 00337: loss did not improve
Epoch 00338: loss did not improve
Epoch 00339: loss did not improve
Epoch 00340: loss did not improve
Epoch 00341: loss did not improve
Epoch 00342: loss did not improve
Epoch 00343: loss did not improve
Epoch 00344: loss did not improve
Epoch 00345: loss did not improve
Epoch 00346: loss did not improve
Epoch 00347: loss did not improve
Epoch 00348: loss did not improve
Epoch 00349: loss did not improve
Epoch 00350: loss did not improve
Epoch 00351: loss did not improve
Epoch 00352: loss did not improve
Epoch 00353: loss did not improve
Epoch 00354: loss did not improve
Epoch 00355: loss did not improve
Epoch 00356: loss did not improve
Epoch 00357: loss did not improve
Epoch 00358: loss did not improve
Epoch 00359: loss did not improve
Epoch 00360: loss did not improve
Epoch 00361: loss did not improve
Epoch 00362: loss did not improve
Epoch 00363: loss did not improve
Epoch 00364: loss did not improve
Epoch 00365: loss did not improve
Epoch 00366: loss did not improve
Epoch 00367: loss did not improve
Epoch 00368: loss did not improve
Epoch 00369: loss did not improve
Epoch 00370: loss did not improve
Epoch 00371: loss did not improve
Epoch 00372: loss did not improve
Epoch 00373: loss did not improve
Epoch 00374: loss did not improve
Epoch 00375: loss did not improve
Epoch 00376: loss did not improve
Epoch 00377: loss did not improve
Epoch 00378: loss did not improve
Epoch 00379: loss did not improve
Epoch 00380: loss did not improve
Epoch 00381: loss did not improve
Epoch 00382: loss did not improve
Epoch 00383: loss did not improve
Epoch 00384: loss did not improve
Epoch 00385: loss did not improve
Epoch 00386: loss did not improve
Epoch 00387: loss did not improve
Epoch 00388: loss did not improve
Epoch 00389: loss did not improve
Epoch 00390: loss did not improve
Epoch 00391: loss did not improve
Epoch 00392: loss did not improve
Epoch 00393: loss did not improve
Epoch 00394: loss did not improve
Epoch 00395: loss did not improve
Epoch 00396: loss did not improve
Epoch 00397: loss did not improve
Epoch 00398: loss did not improve
Epoch 00399: loss did not improve
Epoch 00400: loss did not improve
Epoch 00401: loss did not improve
Epoch 00402: loss did not improve
Epoch 00403: loss did not improve
Epoch 00404: loss did not improve
Epoch 00405: loss did not improve
Epoch 00406: loss did not improve
Epoch 00407: loss did not improve
Epoch 00408: loss did not improve
Epoch 00409: loss did not improve
Epoch 00410: loss did not improve
Epoch 00411: loss did not improve
Epoch 00412: loss did not improve
Epoch 00413: loss did not improve
Epoch 00414: loss did not improve
Epoch 00415: loss did not improve
Epoch 00416: loss did not improve
Epoch 00417: loss did not improve
Epoch 00418: loss did not improve
Epoch 00419: loss did not improve
Epoch 00420: loss did not improve
Epoch 00421: loss did not improve
Epoch 00422: loss did not improve
Epoch 00423: loss did not improve
Epoch 00424: loss did not improve
Epoch 00425: loss did not improve
Epoch 00426: loss did not improve
Epoch 00427: loss did not improve
Epoch 00428: loss did not improve
Epoch 00429: loss did not improve
Epoch 00430: loss did not improve
Epoch 00431: loss did not improve
Epoch 00432: loss did not improve
Epoch 00433: loss did not improve
Epoch 00434: loss did not improve
Epoch 00435: loss did not improve
Epoch 00436: loss improved from 148.73660 to 138.77969, saving model to models2/weights-improvement-436-0.00.hdf5
Epoch 00437: loss did not improve
Epoch 00438: loss did not improve
Epoch 00439: loss did not improve
Epoch 00440: loss did not improve
Epoch 00441: loss did not improve
Epoch 00442: loss did not improve
Epoch 00443: loss did not improve
Epoch 00444: loss did not improve
Epoch 00445: loss did not improve
Epoch 00446: loss did not improve
Epoch 00447: loss did not improve
Epoch 00448: loss did not improve
Epoch 00449: loss did not improve
Epoch 00450: loss did not improve
Epoch 00451: loss did not improve
Epoch 00452: loss did not improve
Epoch 00453: loss did not improve
Epoch 00454: loss did not improve
Epoch 00455: loss did not improve
Epoch 00456: loss did not improve
Epoch 00457: loss did not improve
Epoch 00458: loss did not improve
Epoch 00459: loss did not improve
Epoch 00460: loss did not improve
Epoch 00461: loss did not improve
Epoch 00462: loss did not improve
Epoch 00463: loss did not improve
Epoch 00464: loss did not improve
Epoch 00465: loss did not improve
Epoch 00466: loss did not improve
Epoch 00467: loss did not improve
Epoch 00468: loss did not improve
Epoch 00469: loss did not improve
Epoch 00470: loss did not improve
Epoch 00471: loss did not improve
Epoch 00472: loss did not improve
Epoch 00473: loss did not improve
Epoch 00474: loss did not improve
Epoch 00475: loss did not improve
Epoch 00476: loss did not improve
Epoch 00477: loss did not improve
Epoch 00478: loss did not improve
Epoch 00479: loss did not improve
Epoch 00480: loss did not improve
Epoch 00481: loss did not improve
Epoch 00482: loss did not improve
Epoch 00483: loss did not improve
Epoch 00484: loss did not improve
Epoch 00485: loss did not improve
Epoch 00486: loss did not improve
Epoch 00487: loss did not improve
Epoch 00488: loss did not improve
Epoch 00489: loss did not improve
Epoch 00490: loss did not improve
Epoch 00491: loss did not improve
Epoch 00492: loss did not improve
Epoch 00493: loss did not improve
Epoch 00494: loss did not improve
Epoch 00495: loss did not improve
Epoch 00496: loss did not improve
Epoch 00497: loss did not improve
Epoch 00498: loss did not improve
Epoch 00499: loss did not improve
Epoch 00500: loss did not improve
Epoch 00501: loss did not improve
Epoch 00502: loss did not improve
Epoch 00503: loss did not improve
Epoch 00504: loss did not improve
Epoch 00505: loss did not improve
Epoch 00506: loss did not improve
Epoch 00507: loss did not improve
Epoch 00508: loss did not improve
Epoch 00509: loss did not improve
Epoch 00510: loss did not improve
Epoch 00511: loss did not improve
Epoch 00512: loss did not improve
Epoch 00513: loss did not improve
Epoch 00514: loss did not improve
Epoch 00515: loss did not improve
Epoch 00516: loss did not improve
Epoch 00517: loss did not improve
Epoch 00518: loss did not improve
Epoch 00519: loss did not improve
Epoch 00520: loss did not improve
Epoch 00521: loss did not improve
Epoch 00522: loss improved from 138.77969 to 129.70383, saving model to models2/weights-improvement-522-0.00.hdf5
Epoch 00523: loss did not improve
Epoch 00524: loss did not improve
Epoch 00525: loss did not improve
Epoch 00526: loss did not improve
Epoch 00527: loss did not improve
Epoch 00528: loss did not improve
Epoch 00529: loss did not improve
Epoch 00530: loss did not improve
Epoch 00531: loss did not improve
Epoch 00532: loss did not improve
Epoch 00533: loss did not improve
Epoch 00534: loss did not improve
Epoch 00535: loss did not improve
Epoch 00536: loss did not improve
Epoch 00537: loss did not improve
Epoch 00538: loss did not improve
Epoch 00539: loss did not improve
Epoch 00540: loss did not improve
Epoch 00541: loss did not improve
Epoch 00542: loss did not improve
Epoch 00543: loss did not improve
Epoch 00544: loss did not improve
Epoch 00545: loss did not improve
Epoch 00546: loss did not improve
Epoch 00547: loss did not improve
Epoch 00548: loss did not improve
Epoch 00549: loss did not improve
Epoch 00550: loss did not improve
Epoch 00551: loss improved from 129.70383 to 126.12952, saving model to models2/weights-improvement-551-0.00.hdf5
Epoch 00552: loss did not improve
Epoch 00553: loss did not improve
Epoch 00554: loss did not improve
Epoch 00555: loss did not improve
Epoch 00556: loss did not improve
Epoch 00557: loss did not improve
Epoch 00558: loss did not improve
Epoch 00559: loss did not improve
Epoch 00560: loss did not improve
Epoch 00561: loss did not improve
Epoch 00562: loss did not improve
Epoch 00563: loss did not improve
Epoch 00564: loss did not improve
Epoch 00565: loss did not improve
Epoch 00566: loss did not improve
Epoch 00567: loss did not improve
Epoch 00568: loss did not improve
Epoch 00569: loss did not improve
Epoch 00570: loss did not improve
Epoch 00571: loss did not improve
Epoch 00572: loss did not improve
Epoch 00573: loss did not improve
Epoch 00574: loss did not improve
Epoch 00575: loss did not improve
Epoch 00576: loss did not improve
Epoch 00577: loss did not improve
Epoch 00578: loss did not improve
Epoch 00579: loss did not improve
Epoch 00580: loss did not improve
Epoch 00581: loss did not improve
Epoch 00582: loss did not improve
Epoch 00583: loss did not improve
Epoch 00584: loss did not improve
Epoch 00585: loss did not improve
Epoch 00586: loss did not improve
Epoch 00587: loss did not improve
Epoch 00588: loss did not improve
Epoch 00589: loss did not improve
Epoch 00590: loss did not improve
Epoch 00591: loss did not improve
Epoch 00592: loss did not improve
Epoch 00593: loss did not improve
Epoch 00594: loss did not improve
Epoch 00595: loss did not improve
Epoch 00596: loss did not improve
Epoch 00597: loss did not improve
Epoch 00598: loss did not improve
Epoch 00599: loss did not improve
Epoch 00600: loss did not improve
Epoch 00601: loss did not improve
Epoch 00602: loss did not improve
Epoch 00603: loss did not improve
Epoch 00604: loss did not improve
Epoch 00605: loss did not improve
Epoch 00606: loss did not improve
Epoch 00607: loss did not improve
Epoch 00608: loss did not improve
Epoch 00609: loss did not improve
Epoch 00610: loss did not improve
Epoch 00611: loss did not improve
Epoch 00612: loss did not improve
Epoch 00613: loss did not improve
Epoch 00614: loss did not improve
Epoch 00615: loss did not improve
Epoch 00616: loss did not improve
Epoch 00617: loss did not improve
Epoch 00618: loss did not improve
Epoch 00619: loss did not improve
Epoch 00620: loss did not improve
Epoch 00621: loss did not improve
Epoch 00622: loss did not improve
Epoch 00623: loss did not improve
Epoch 00624: loss did not improve
Epoch 00625: loss did not improve
Epoch 00626: loss did not improve
Epoch 00627: loss did not improve
Epoch 00628: loss did not improve
Epoch 00629: loss did not improve
Epoch 00630: loss did not improve
Epoch 00631: loss did not improve
Epoch 00632: loss did not improve
Epoch 00633: loss did not improve
Epoch 00634: loss did not improve
Epoch 00635: loss did not improve
Epoch 00636: loss did not improve
Epoch 00637: loss did not improve
Epoch 00638: loss did not improve
Epoch 00639: loss did not improve
Epoch 00640: loss did not improve
Epoch 00641: loss did not improve
Epoch 00642: loss did not improve
Epoch 00643: loss did not improve
Epoch 00644: loss did not improve
Epoch 00645: loss did not improve
Epoch 00646: loss did not improve
Epoch 00647: loss did not improve
Epoch 00648: loss did not improve
Epoch 00649: loss did not improve
Epoch 00650: loss did not improve
Epoch 00651: loss did not improve
Epoch 00652: loss did not improve
Epoch 00653: loss did not improve
Epoch 00654: loss did not improve
Epoch 00655: loss did not improve
Epoch 00656: loss did not improve
Epoch 00657: loss did not improve
Epoch 00658: loss did not improve
Epoch 00659: loss did not improve
Epoch 00660: loss did not improve
Epoch 00661: loss did not improve
Epoch 00662: loss did not improve
Epoch 00663: loss improved from 126.12952 to 125.86361, saving model to models2/weights-improvement-663-0.00.hdf5
Epoch 00664: loss did not improve
Epoch 00665: loss did not improve
Epoch 00666: loss did not improve
Epoch 00667: loss did not improve
Epoch 00668: loss did not improve
Epoch 00669: loss did not improve
Epoch 00670: loss did not improve
Epoch 00671: loss did not improve
Epoch 00672: loss did not improve
Epoch 00673: loss did not improve
Epoch 00674: loss did not improve
Epoch 00675: loss did not improve
Epoch 00676: loss did not improve
Epoch 00677: loss did not improve
Epoch 00678: loss did not improve
Epoch 00679: loss did not improve
Epoch 00680: loss did not improve
Epoch 00681: loss did not improve
Epoch 00682: loss did not improve
Epoch 00683: loss did not improve
Epoch 00684: loss did not improve
Epoch 00685: loss did not improve
Epoch 00686: loss did not improve
Epoch 00687: loss did not improve
Epoch 00688: loss did not improve
Epoch 00689: loss did not improve
Epoch 00690: loss did not improve
Epoch 00691: loss did not improve
Epoch 00692: loss did not improve
Epoch 00693: loss did not improve
Epoch 00694: loss did not improve
Epoch 00695: loss did not improve
Epoch 00696: loss did not improve
Epoch 00697: loss did not improve
Epoch 00698: loss did not improve
Epoch 00699: loss did not improve
Epoch 00700: loss did not improve
Epoch 00701: loss did not improve
Epoch 00702: loss did not improve
Epoch 00703: loss did not improve
Epoch 00704: loss did not improve
Epoch 00705: loss did not improve
Epoch 00706: loss did not improve
Epoch 00707: loss did not improve
Epoch 00708: loss did not improve
Epoch 00709: loss did not improve
Epoch 00710: loss did not improve
Epoch 00711: loss did not improve
Epoch 00712: loss did not improve
Epoch 00713: loss did not improve
Epoch 00714: loss did not improve
Epoch 00715: loss did not improve
Epoch 00716: loss did not improve
Epoch 00717: loss did not improve
Epoch 00718: loss did not improve
Epoch 00719: loss did not improve
Epoch 00720: loss did not improve
Epoch 00721: loss did not improve
Epoch 00722: loss did not improve
Epoch 00723: loss did not improve
Epoch 00724: loss did not improve
Epoch 00725: loss did not improve
Epoch 00726: loss did not improve
Epoch 00727: loss did not improve
Epoch 00728: loss did not improve
Epoch 00729: loss did not improve
Epoch 00730: loss did not improve
Epoch 00731: loss did not improve
Epoch 00732: loss did not improve
Epoch 00733: loss did not improve
Epoch 00734: loss did not improve
Epoch 00735: loss did not improve
Epoch 00736: loss did not improve
Epoch 00737: loss did not improve
Epoch 00738: loss did not improve
Epoch 00739: loss did not improve
Epoch 00740: loss did not improve
Epoch 00741: loss did not improve
Epoch 00742: loss did not improve
Epoch 00743: loss did not improve
Epoch 00744: loss did not improve
Epoch 00745: loss did not improve
Epoch 00746: loss did not improve
Epoch 00747: loss did not improve
Epoch 00748: loss did not improve
Epoch 00749: loss did not improve
Epoch 00750: loss did not improve
Epoch 00751: loss did not improve
Epoch 00752: loss did not improve
Epoch 00753: loss did not improve
Epoch 00754: loss did not improve
Epoch 00755: loss did not improve
Epoch 00756: loss did not improve
Epoch 00757: loss did not improve
Epoch 00758: loss did not improve
Epoch 00759: loss did not improve
Epoch 00760: loss did not improve
Epoch 00761: loss did not improve
Epoch 00762: loss did not improve
Epoch 00763: loss did not improve
Epoch 00764: loss did not improve
Epoch 00765: loss did not improve
Epoch 00766: loss did not improve
Epoch 00767: loss did not improve
Epoch 00768: loss did not improve
Epoch 00769: loss did not improve
Epoch 00770: loss did not improve
Epoch 00771: loss did not improve
Epoch 00772: loss did not improve
Epoch 00773: loss did not improve
Epoch 00774: loss did not improve
Epoch 00775: loss did not improve
Epoch 00776: loss did not improve
Epoch 00777: loss did not improve
Epoch 00778: loss did not improve
Epoch 00779: loss did not improve
Epoch 00780: loss did not improve
Epoch 00781: loss did not improve
Epoch 00782: loss did not improve
Epoch 00783: loss did not improve
Epoch 00784: loss did not improve
Epoch 00785: loss did not improve
Epoch 00786: loss did not improve
Epoch 00787: loss did not improve
Epoch 00788: loss did not improve
Epoch 00789: loss did not improve
Epoch 00790: loss did not improve
Epoch 00791: loss did not improve
Epoch 00792: loss did not improve
Epoch 00793: loss did not improve
Epoch 00794: loss did not improve
Epoch 00795: loss did not improve
Epoch 00796: loss did not improve
Epoch 00797: loss did not improve
Epoch 00798: loss did not improve
Epoch 00799: loss did not improve
Epoch 00800: loss did not improve
Epoch 00801: loss did not improve
Epoch 00802: loss did not improve
Epoch 00803: loss did not improve
Epoch 00804: loss did not improve
Epoch 00805: loss did not improve
Epoch 00806: loss did not improve
Epoch 00807: loss did not improve
Epoch 00808: loss did not improve
Epoch 00809: loss did not improve
Epoch 00810: loss did not improve
Epoch 00811: loss did not improve
Epoch 00812: loss did not improve
Epoch 00813: loss did not improve
Epoch 00814: loss did not improve
Epoch 00815: loss did not improve
Epoch 00816: loss did not improve
Epoch 00817: loss did not improve
Epoch 00818: loss did not improve
Epoch 00819: loss did not improve
Epoch 00820: loss did not improve
Epoch 00821: loss did not improve
Epoch 00822: loss did not improve
Epoch 00823: loss did not improve
Epoch 00824: loss did not improve
Epoch 00825: loss improved from 125.86361 to 107.60916, saving model to models2/weights-improvement-825-0.00.hdf5
Epoch 00826: loss did not improve
Epoch 00827: loss did not improve
Epoch 00828: loss did not improve
Epoch 00829: loss did not improve
Epoch 00830: loss did not improve
Epoch 00831: loss did not improve
Epoch 00832: loss did not improve
Epoch 00833: loss did not improve
Epoch 00834: loss did not improve
Epoch 00835: loss did not improve
Epoch 00836: loss did not improve
Epoch 00837: loss did not improve
Epoch 00838: loss did not improve
Epoch 00839: loss did not improve
Epoch 00840: loss did not improve
Epoch 00841: loss did not improve
Epoch 00842: loss did not improve
Epoch 00843: loss did not improve
Epoch 00844: loss did not improve
Epoch 00845: loss did not improve
Epoch 00846: loss did not improve
Epoch 00847: loss did not improve
Epoch 00848: loss did not improve
Epoch 00849: loss did not improve
Epoch 00850: loss did not improve
Epoch 00851: loss did not improve
Epoch 00852: loss did not improve
Epoch 00853: loss did not improve
Epoch 00854: loss did not improve
Epoch 00855: loss did not improve
Epoch 00856: loss did not improve
Epoch 00857: loss did not improve
Epoch 00858: loss did not improve
Epoch 00859: loss did not improve
Epoch 00860: loss did not improve
Epoch 00861: loss did not improve
Epoch 00862: loss did not improve
Epoch 00863: loss did not improve
Epoch 00864: loss did not improve
Epoch 00865: loss did not improve
Epoch 00866: loss did not improve
Epoch 00867: loss did not improve
Epoch 00868: loss did not improve
Epoch 00869: loss did not improve
Epoch 00870: loss did not improve
Epoch 00871: loss did not improve
Epoch 00872: loss did not improve
Epoch 00873: loss did not improve
Epoch 00874: loss did not improve
Epoch 00875: loss did not improve
Epoch 00876: loss did not improve
Epoch 00877: loss did not improve
Epoch 00878: loss did not improve
Epoch 00879: loss did not improve
Epoch 00880: loss did not improve
Epoch 00881: loss did not improve
Epoch 00882: loss did not improve
Epoch 00883: loss did not improve
Epoch 00884: loss did not improve
Epoch 00885: loss did not improve
Epoch 00886: loss did not improve
Epoch 00887: loss did not improve
Epoch 00888: loss did not improve
Epoch 00889: loss did not improve
Epoch 00890: loss did not improve
Epoch 00891: loss did not improve
Epoch 00892: loss did not improve
Epoch 00893: loss did not improve
Epoch 00894: loss did not improve
Epoch 00895: loss did not improve
Epoch 00896: loss did not improve
Epoch 00897: loss did not improve
Epoch 00898: loss did not improve
Epoch 00899: loss did not improve
Epoch 00900: loss did not improve
Epoch 00901: loss did not improve
Epoch 00902: loss did not improve
Epoch 00903: loss did not improve
Epoch 00904: loss did not improve
Epoch 00905: loss did not improve
Epoch 00906: loss did not improve
Epoch 00907: loss did not improve
Epoch 00908: loss did not improve
Epoch 00909: loss did not improve
Epoch 00910: loss did not improve
Epoch 00911: loss did not improve
Epoch 00912: loss did not improve
Epoch 00913: loss did not improve
Epoch 00914: loss did not improve
Epoch 00915: loss did not improve
Epoch 00916: loss did not improve
Epoch 00917: loss did not improve
Epoch 00918: loss did not improve
Epoch 00919: loss did not improve
Epoch 00920: loss did not improve
Epoch 00921: loss did not improve
Epoch 00922: loss did not improve
Epoch 00923: loss did not improve
Epoch 00924: loss did not improve
Epoch 00925: loss did not improve
Epoch 00926: loss did not improve
Epoch 00927: loss did not improve
Epoch 00928: loss did not improve
Epoch 00929: loss did not improve
Epoch 00930: loss did not improve
Epoch 00931: loss did not improve
Epoch 00932: loss did not improve
Epoch 00933: loss did not improve
Epoch 00934: loss did not improve
Epoch 00935: loss did not improve
Epoch 00936: loss did not improve
Epoch 00937: loss did not improve
Epoch 00938: loss did not improve
Epoch 00939: loss did not improve
Epoch 00940: loss did not improve
Epoch 00941: loss did not improve
Epoch 00942: loss did not improve
Epoch 00943: loss did not improve
Epoch 00944: loss did not improve
Epoch 00945: loss did not improve
Epoch 00946: loss did not improve
Epoch 00947: loss did not improve
Epoch 00948: loss did not improve
Epoch 00949: loss did not improve
Epoch 00950: loss did not improve
Epoch 00951: loss did not improve
Epoch 00952: loss did not improve
Epoch 00953: loss did not improve
Epoch 00954: loss did not improve
Epoch 00955: loss did not improve
Epoch 00956: loss did not improve
Epoch 00957: loss did not improve
Epoch 00958: loss did not improve
Epoch 00959: loss did not improve
Epoch 00960: loss did not improve
Epoch 00961: loss did not improve
Epoch 00962: loss did not improve
Epoch 00963: loss did not improve
Epoch 00964: loss did not improve
Epoch 00965: loss did not improve
Epoch 00966: loss did not improve
Epoch 00967: loss did not improve
Epoch 00968: loss did not improve
Epoch 00969: loss did not improve
Epoch 00970: loss did not improve
Epoch 00971: loss did not improve
Epoch 00972: loss did not improve
Epoch 00973: loss did not improve
Epoch 00974: loss did not improve
Epoch 00975: loss did not improve
Epoch 00976: loss did not improve
Epoch 00977: loss did not improve
Epoch 00978: loss did not improve
Epoch 00979: loss did not improve
Epoch 00980: loss did not improve
Epoch 00981: loss did not improve
Epoch 00982: loss did not improve
Epoch 00983: loss did not improve
Epoch 00984: loss did not improve
Epoch 00985: loss did not improve
Epoch 00986: loss did not improve
Epoch 00987: loss did not improve
Epoch 00988: loss did not improve
Epoch 00989: loss did not improve
Epoch 00990: loss did not improve
Epoch 00991: loss did not improve
Epoch 00992: loss did not improve
Epoch 00993: loss did not improve
Epoch 00994: loss did not improve
Epoch 00995: loss did not improve
Epoch 00996: loss did not improve
Epoch 00997: loss did not improve
Epoch 00998: loss did not improve
Epoch 00999: loss did not improve
Epoch 01000: loss did not improve
Out[87]:
<keras.callbacks.History at 0x7f95172de048>
In [51]:
###Load best model

model = models.load_model('models/weights-improvement-975-0.00.hdf5')
model.summary()
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
input_23 (InputLayer)        (None, 1)                 0         
_________________________________________________________________
Dense_l1 (Dense)             (None, 75)                150       
_________________________________________________________________
LRelu_l1 (LeakyReLU)         (None, 75)                0         
_________________________________________________________________
Dense_l2 (Dense)             (None, 150)               11400     
_________________________________________________________________
LRelu_l2 (LeakyReLU)         (None, 150)               0         
_________________________________________________________________
Dense_l3 (Dense)             (None, 75)                11325     
_________________________________________________________________
LRelu_l3 (LeakyReLU)         (None, 75)                0         
_________________________________________________________________
Dense_l4 (Dense)             (None, 1)                 76        
_________________________________________________________________
Output (LeakyReLU)           (None, 1)                 0         
=================================================================
Total params: 22,951
Trainable params: 22,951
Non-trainable params: 0
_________________________________________________________________
In [53]:
###Load best model for detah prediction

model_death_pred = models.load_model('models2/weights-improvement-825-0.00.hdf5')
model_death_pred.summary()
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
input_2 (InputLayer)         (None, 1)                 0         
_________________________________________________________________
Dense_l1 (Dense)             (None, 75)                150       
_________________________________________________________________
LRelu_l1 (LeakyReLU)         (None, 75)                0         
_________________________________________________________________
Dense_l2 (Dense)             (None, 150)               11400     
_________________________________________________________________
LRelu_l2 (LeakyReLU)         (None, 150)               0         
_________________________________________________________________
Dense_l3 (Dense)             (None, 75)                11325     
_________________________________________________________________
LRelu_l3 (LeakyReLU)         (None, 75)                0         
_________________________________________________________________
Dense_l4 (Dense)             (None, 1)                 76        
_________________________________________________________________
Output (LeakyReLU)           (None, 1)                 0         
=================================================================
Total params: 22,951
Trainable params: 22,951
Non-trainable params: 0
_________________________________________________________________
In [54]:
prediction_days=10
pred_data = model.predict(np.arange(1,len(data_y) + prediction_days+1))
pred_data_deaths = model_death_pred.predict(np.arange(1,len(data_y_deaths) + prediction_days+1))
In [55]:
###Plot Deep learning predictions against actual data

plt.figure(figsize=(15,5))
plt.title('Coronavirus Cases over time in USA- Actual vs Predicted(Deep Learning)', size=25)
plt.xlabel('No. of Days starting 01/22', size=20)
plt.ylabel('No. of Confirmed Cases', size=20)
plt.grid(True,which='minor')
plt.grid(color='b', linestyle='--', linewidth=0.1)
plt.legend()
plt.xticks(np.arange(min(data_x), len(pred_days)+1, 2))
plt.plot(np.arange(1,len(pred_days)+1),pred_data,color='red', label="Deep learning predicted Data")
plt.plot(data_x,np.array(full_table_usa.groupby('Date')['Confirmed'].sum()), color='blue', label="Actual Data")
plt.legend(fontsize =18)
plt.savefig('plots/deeplearning_predictions.png')
In [56]:
###Plot Deep learning predictions against actual data

plt.figure(figsize=(15,5))
plt.title('Coronavirus Deaths over time in USA- Actual vs Predicted(Deep Learning)', size=25)
plt.xlabel('No. of Days starting 01/22', size=20)
plt.ylabel('No. of Deaths', size=20)
plt.grid(True,which='minor')
plt.grid(color='b', linestyle='--', linewidth=0.1)
plt.legend()
plt.xticks(np.arange(min(data_x), len(pred_days)+1, 2))
plt.plot(np.arange(1,len(pred_days)+1),pred_data_deaths,color='red', label="Deep learning predicted Data")
plt.plot(data_x,np.array(full_table_usa.groupby('Date')['Deaths'].sum()), color='blue', label="Actual Data")
plt.legend(fontsize =18)
plt.savefig('plots/deeplearning_death_predictions.png')
In [ ]: